2

我正在使用 AVCaptureToAudioUnit 的示例代码使用 iPhone 作为麦克风录制语音。我使用此示例作为起点,因为与其他示例项目相比,我在此示例项目中取得了更大的成功。

当文件为 .aif 或 .caf 时,演示应用程序运行良好,但当我创建 .wav、.aac 或 .mp3 文件时,调试控制台中会出现以下消息

AudioStreamBasicDescription: 1 ch, 44100 Hz, 'lpcm' (0x0000000E) 16-bit big-endian signed integer

2013-07-26 19:52:06.653 AVCaptureToAudioUnit[2514:907] Failed to setup audio file! (29759)

要更改文件格式,我做了两个更改

[a] NSString 语句中的文件扩展名(参见下面的 init 更改)和

[b] 使用音频文件服务参考中定义的常量兼容设置(在 startRecording 中)。

使用其他文件格式时是否必须更改其他属性?有没有人遇到过这个问题?

这是 [a] 的代码

@implementation CaptureSessionController

#pragma mark ======== Setup and teardown methods =========

/// Designated initializer: builds the destination URL for the recording
/// (Documents/AudioRecording.aif) and registers for capture notifications.
/// @return The initialized controller, or nil if [super init] fails.
- (instancetype)init
{
    self = [super init];

    if (self) {
        // Locate the app's Documents directory (always present on iOS).
        NSArray  *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];

        // Use -stringByAppendingPathComponent: rather than a "%@/..." format
        // string so the path separator is handled correctly.
        // NOTE(review): changing only this extension (.wav/.aac/.mp3) is not
        // enough — the AudioFileTypeID passed to ExtAudioFileCreateWithURL in
        // -startRecording must be changed to a matching container type too.
        NSString *destinationFilePath = [documentsDirectory stringByAppendingPathComponent:@"AudioRecording.aif"];

        // Stored in the _outputFile ivar; this code base manages the CFURLRef
        // manually (sample is non-ARC), so no __bridge cast is used here.
        _outputFile = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);

        [self registerForNotifications];
    }

    return self;
}

这是 [b] 的代码

/// Starts recording by creating an ExtAudioFile at _outputFile whose data
/// format matches the sample rate / channel count of the current input, then
/// setting the client (write) format to the audio graph's output format.
/// Sets self.recording = YES on success; logs the OSErr on failure.
- (void)startRecording
{
    if (!self.isRecording) {
        OSErr err = kAudioFileUnspecifiedError;
        @synchronized(self) {
            if (!extAudioFile) {
                // The container type and the data format MUST agree:
                //   kAudioFileAIFFType      -> big-endian integer PCM
                //   kAudioFileWAVEType/CAF  -> little-endian PCM (no big-endian flag)
                //   kAudioFileAAC_ADTSType  -> requires an AAC ASBD, not 'lpcm'
                //   kAudioFileMP3Type       -> not writable (iOS has no MP3 encoder)
                // Forcing kAudioFormatFlagIsBigEndian unconditionally is why
                // ExtAudioFileCreateWithURL fails for WAV/AAC/MP3 ("Failed to
                // setup audio file"). Change this one constant to switch types.
                const AudioFileTypeID fileType = kAudioFileAIFFType;

                // Recording format = the on-disk format of the audio file itself:
                // 16-bit interleaved integer PCM at the input's rate/channels.
                CAStreamBasicDescription recordingFormat(currentInputASBD.mSampleRate, currentInputASBD.mChannelsPerFrame, CAStreamBasicDescription::kPCMFormatInt16, true);
                if (fileType == kAudioFileAIFFType) {
                    // Only AIFF stores big-endian samples; WAV/CAF stay native/little.
                    recordingFormat.mFormatFlags |= kAudioFormatFlagIsBigEndian;
                }

                NSLog(@"Recording Audio Format:");
                recordingFormat.Print();

                err = ExtAudioFileCreateWithURL(_outputFile,
                                                fileType,
                                                &recordingFormat,
                                                currentRecordingChannelLayout,
                                                kAudioFileFlags_EraseFile,
                                                &extAudioFile);

                if (noErr == err) {
                    // Client format = what we hand ExtAudioFileWrite: the output
                    // format of the delay unit; ExtAudioFile converts on the fly.
                    err = ExtAudioFileSetProperty(extAudioFile, kExtAudioFileProperty_ClientDataFormat, sizeof(graphOutputASBD), &graphOutputASBD);
                }

                if (noErr != err) {
                    // Creation or configuration failed: tear down so a later
                    // call can retry from a clean state.
                    if (extAudioFile) ExtAudioFileDispose(extAudioFile);
                    extAudioFile = NULL;
                }
            }
        } // @synchronized

        if (noErr == err) {
            self.recording = YES;
            NSLog(@"Recording Started");
        } else {
            NSLog(@"Failed to setup audio file! (%ld)", (long)err);
        }
    }
}
4

0 回答 0