6

我正在使用 AVAudioRecorder。我以 caf 格式录制声音。之后，我使用 TPAACAudioConverter 将 caf 文件转换为 aac 格式。它工作正常，但转换后的文件持续时间显示为 00:00。有什么方法可以获取 aac 音频文件的持续时间？

4

2 回答 2

2

某些模拟器不支持 AAC 格式。您可以在真机设备上验证：在设备上它可以正常工作，您应该就能获取到 AAC 音频文件的持续时间。

于 2013-08-01T09:47:53.287 回答
0

你能播放caf文件吗?

如果您只想将声音从麦克风录制到 aac 文件,您可以使用音频队列服务(我可以发布一些代码)

编辑:它是 Apple 开发教程中的一个实现,可能有一些错误,因为我对其进行了修改以适合您的问题

//AudioQ.mm
//
// Records audio from the default input into an ADTS/AAC file using
// Audio Queue Services, based on Apple's Audio Queue Services
// Programming Guide recording sample.
@implementation AudioQ
// Number of queue buffers kept in flight (Apple's guide recommends 3).
static const int nBuffer = 3;

// State shared between the Objective-C wrapper and the C callbacks.
struct AQRecorderState{
    AudioStreamBasicDescription mDataFormat;        // output (AAC) format
    AudioQueueRef               mQueue;             // the recording queue
    AudioQueueBufferRef         mBuffers[nBuffer];  // reusable capture buffers
    AudioFileID                 mAudioFile;         // destination AAC file
    UInt32                      bufferByteSize;     // size of each buffer
    SInt64                      mCurrentPacket;     // next packet index to write
    bool                        mIsRunning;         // recording in progress?
};

AQRecorderState aqData;
CFURLRef url;

/// Write callback invoked by AudioFile every time data is written to the
/// "file". Returning noErr (0) accepts the write; the encoded AAC packets
/// could also be intercepted here, e.g. to stream them for broadcasting.
static OSStatus BufferFilledHandler(
                                    void *                               inUserData,
                                    SInt64                               inPosition,
                                    UInt32                               requestCount,
                                    const void *                         buffer,
                                    UInt32 *                             actualCount

                                    ){
    // Callback when audio is written to the file.
    // Audio packets can be handled here and sent out for broadcasting.
    return 0;
}

/// Audio queue input callback: fires on the run loop each time the queue
/// has filled a buffer with encoded packets. Appends the packets to the
/// output file and re-enqueues the buffer while recording is running.
static void HandleInputBuffer(
                              void                              *inUserData,
                              AudioQueueRef                     inAq,
                              AudioQueueBufferRef                   inBuffer,
                              const AudioTimeStamp              *inStartTime,
                              UInt32                                inNumPackets,
                              const AudioStreamPacketDescription    *inPacketDesc
                              ) {
    // NOTE: the parameter was renamed from `aqData` to `inUserData` to stop
    // it shadowing the file-scope `aqData` global.
    AQRecorderState *pAqData = (AQRecorderState *) inUserData;
    if (AudioFileWritePackets (
                               pAqData->mAudioFile,
                               false,                           // not caching
                               inBuffer->mAudioDataByteSize,
                               inPacketDesc,
                               pAqData->mCurrentPacket,
                               &inNumPackets,                   // in/out: packets written
                               inBuffer->mAudioData
                               ) == noErr) {
        pAqData->mCurrentPacket += inNumPackets;
    } else {
        NSLog(@"err writing packet");
    }
    // Once recording has stopped, let the buffer drain instead of recycling it.
    if (pAqData->mIsRunning == 0)
        return;
    AudioQueueEnqueueBuffer(pAqData->mQueue, inBuffer, 0, NULL);
}

/// Creates the input queue, the AAC output file and the buffer pool.
/// @return noErr on success, otherwise the first failing OSStatus
///         (the original always returned 0, hiding setup failures).
-(OSStatus) initializeAQ{

    //--- set the output format: mono AAC at 22.05 kHz ---//
    aqData.mDataFormat.mSampleRate = 22050;
    aqData.mDataFormat.mFormatID = kAudioFormatMPEG4AAC;
    aqData.mDataFormat.mFormatFlags = kMPEG4Object_AAC_Main;
    // For compressed formats the byte counts are 0 (encoder-determined);
    // AAC always packs 1024 frames per packet.
    aqData.mDataFormat.mBytesPerPacket = 0;
    aqData.mDataFormat.mFramesPerPacket = 1024;
    aqData.mDataFormat.mBytesPerFrame = 0;
    aqData.mDataFormat.mChannelsPerFrame = 1;
    aqData.mDataFormat.mBitsPerChannel = 0;
    AudioFileTypeID fileType = kAudioFileAAC_ADTSType;

    aqData.bufferByteSize = 0x5000; // 20 KiB per buffer — ample for AAC packets

    OSStatus status = AudioQueueNewInput(&aqData.mDataFormat, HandleInputBuffer, &aqData, CFRunLoopGetMain(), kCFRunLoopCommonModes, 0, &aqData.mQueue);
    if (status != noErr) return status;
    aqData.mCurrentPacket = 0;
    aqData.mIsRunning = true;


    //--- create the file; BufferFilledHandler fires on every write ---//
    // BUG FIX: the original passed (void*)&pAqData — the address of a local
    // AQRecorderState* — as the client data, i.e. an AQRecorderState** that
    // dangles after this method returns. Pass the state struct itself.
    status = AudioFileInitializeWithCallbacks((void*)&aqData,
                                     nil,
                                     BufferFilledHandler,
                                     nil,
                                     nil,
                                     fileType,
                                     &aqData.mDataFormat,
                                     kAudioFileFlags_EraseFile,
                                     &aqData.mAudioFile);
    if (status != noErr) return status;

    //--- prepare the set of audio queue buffers ---//
    for(int i = 0 ; i < nBuffer ; i++){
        status = AudioQueueAllocateBuffer(aqData.mQueue, aqData.bufferByteSize, &aqData.mBuffers[i]);
        if (status != noErr) return status;
        status = AudioQueueEnqueueBuffer(aqData.mQueue, aqData.mBuffers[i], 0, NULL);
        if (status != noErr) return status;
    }
    return noErr;
}

/// Starts recording immediately.
-(void) start{
    AudioQueueStart(aqData.mQueue, NULL);
}

/// Stops recording synchronously, then releases the queue and closes the
/// file (closing finalizes the file so its duration is readable).
-(void) stop{
    NSLog(@"stopping");
    AudioQueueStop(aqData.mQueue, true);   // true = stop synchronously
    aqData.mIsRunning = false;
    AudioQueueDispose (aqData.mQueue,true);
    AudioFileClose (aqData.mAudioFile);
}
@end

AudioQ.h

// NOTE(review): declaring these callbacks `static` in a header gives every
// importing translation unit its own private (and possibly undefined) copy;
// they presumably belong only in AudioQ.mm — confirm before reuse.

// Audio queue input callback — invoked when a capture buffer is filled.
static void HandleInputBuffer(
                              void                                  *aqData,
                              AudioQueueRef                         inAq,
                              AudioQueueBufferRef                   inBuffer,
                              const AudioTimeStamp                  *inStartTime,
                              UInt32                                inNumPackets,
                              const AudioStreamPacketDescription    *inPacketDesc
                              );

// AudioFile write callback — invoked on each write to the output file.
static OSStatus BufferFilledHandler(
                                    void *                               inUserData,
                                    SInt64                               inPosition,
                                    UInt32                               requestCount,
                                    const void *                         buffer,
                                    UInt32 *                             actualCount
                                    );
// Public recorder interface (enclosing @interface not shown in this excerpt).
-(OSStatus)initializeAQ;
-(void)stop;
-(void)start;
于 2013-07-25T14:17:09.207 回答