0

我正在使用 Speech 框架(Siri 的语音转文字)做语音识别,但遇到的问题是 audioEngine 没有响应。

audioEngine 只启动一次,并在识别出第一段语音文本后就停止了;而我希望在一定时间内持续识别语音。任何建议都不胜感激。

这是我到目前为止所做的代码:

// Configure a US-English speech recognizer and a fresh audio engine.
NSLocale *locale = [NSLocale localeWithLocaleIdentifier:@"en-US"];
speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:locale];

audioEngine = [[AVAudioEngine alloc] init];

// Cancel any in-flight recognition before starting a new session.
if (speechRecognitionTask != nil) {
    [audioEngine stop];
    [speechRecognitionTask cancel];
    speechRecognitionTask = nil;
}

AVAudioSession *audioSession = [AVAudioSession sharedInstance];

// BUG FIX: the original tested `error == nil` immediately after setting it
// to nil, so the error branch was unreachable. Configure first, then check.
NSError *error = nil;
[audioSession setCategory:AVAudioSessionCategoryRecord error:&error];
[audioSession setMode:AVAudioSessionModeMeasurement error:&error];
[audioSession setActive:YES error:&error];
if (error != nil) {
    NSLog(@"audioSession properties weren't set because of an error.");
}

recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];

AVAudioInputNode *inputNode = audioEngine.inputNode;

if (inputNode == nil) {
    NSLog(@"AudioEngine has no input node");
    return;
}
recognitionRequest.shouldReportPartialResults = YES;

speechRecognitionTask = [speechRecognizer recognitionTaskWithRequest:recognitionRequest
                                                       resultHandler:^(SFSpeechRecognitionResult *result, NSError *error1) {
    BOOL isFinal = NO;

    if (result != nil) {
        self.textView.text = result.bestTranscription.formattedString;
        // BUG FIX: the original inverted this flag (`![result isFinal]`) and
        // tore down the engine/tap on the very FIRST partial result, which is
        // why recognition stopped after the first recognized text.
        isFinal = [result isFinal];
    }

    // Only tear down once recognition has genuinely finished or failed.
    if (error1 != nil || isFinal) {
        [audioEngine stop];
        [inputNode removeTapOnBus:0];
        recognitionRequest = nil;
        speechRecognitionTask = nil;
    }
}];

// Feed microphone buffers into the recognition request.
// BUG FIX: in the original this tap was installed inside the result handler,
// after `recognitionRequest` had already been nilled out.
AVAudioFormat *recordingFormat = [inputNode outputFormatForBus:0];

[inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
    [recognitionRequest appendAudioPCMBuffer:buffer];
}];

NSError *startError = nil;
[audioEngine prepare];

// BUG FIX: check the BOOL return of -startAndReturnError: rather than the
// error pointer — Cocoa convention is that the error is only meaningful on
// failure. (Also fixed the "discription" typo in the log message.)
if (![audioEngine startAndReturnError:&startError]) {
    NSLog(@"Error description: %@", startError.description);
}
4

1 回答 1

0
// Starts a fresh speech-recognition session: tears down any previous task,
// configures the shared audio session for recording, wires the microphone
// tap into an SFSpeechAudioBufferRecognitionRequest, and starts the engine.
// A one-shot 4 s timer (scheduled on the first recognized text) ends the
// session via -endRecordingAudio.
- (void)startRecording
{
    audioEngine = [[AVAudioEngine alloc] init];

    // BUG FIX: cancel the old task BEFORE dropping the reference. The
    // original nilled the ivar first, so -cancel was sent to nil (a no-op)
    // and the previous recognition task kept running.
    if (speechRecognitionTask != nil) {
        [speechRecognitionTask cancel];
        speechRecognitionTask = nil;
    }

    [self showActivityIndicator];
    self.textView.text = @"Please speak the business";
    audioSession = [AVAudioSession sharedInstance];

    // BUG FIX: the original tested `error == nil` right after initializing it
    // to nil, making the failure log unreachable. Configure, then check.
    NSError *error = nil;
    [audioSession setCategory:AVAudioSessionCategoryRecord error:&error];
    [audioSession setMode:AVAudioSessionModeMeasurement error:&error];
    [audioSession setActive:YES
                withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                      error:&error];
    if (error != nil) {
        NSLog(@"audioSession properties weren't set because of an error.");
    }

    recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];

    inputNode = audioEngine.inputNode;

    if (inputNode == nil) {
        NSLog(@"AudioEngine has no input node");
    }

    if (recognitionRequest == nil) {
        // (fixed "and" -> "an" typo in the log message)
        NSLog(@"Unable to create an SFSpeechAudioBufferRecognitionRequest object");
    }
    recognitionRequest.shouldReportPartialResults = YES;

    speechRecognitionTask = [speechRecognizer recognitionTaskWithRequest:recognitionRequest resultHandler:^(SFSpeechRecognitionResult * _Nullable result, NSError * _Nullable errorl) {

        // BUG FIX: was uninitialized when result == nil, so the later
        // `if (isFinal)` read garbage.
        BOOL isFinal = NO;
        if (result != nil) {
            NSLog(@"Formatted String: %@ ", result.bestTranscription.formattedString);
            userFeedback.text = result.bestTranscription.formattedString;

            // Stop listening 4 s after the first recognized text arrives.
            [NSTimer scheduledTimerWithTimeInterval:4 target:self selector:@selector(endRecordingAudio) userInfo:nil repeats:NO];

            isFinal = [result isFinal];
            if (isFinal) {
                [audioEngine stop];
                [inputNode removeTapOnBus:0];
                recognitionRequest = nil;
                speechRecognitionTask = nil;
            }

            [self.microphoneButton setEnabled:YES];
        }

        if (errorl) {
            NSLog(@"Error Description: %@", errorl);
        }
    }];

    // Feed microphone buffers into the recognition request.
    AVAudioFormat *recordingFormat = [inputNode outputFormatForBus:0];

    [inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
        [recognitionRequest appendAudioPCMBuffer:buffer];
    }];

    NSError *startError = nil;

    [audioEngine prepare];

    // BUG FIX: check the BOOL return of -startAndReturnError: instead of the
    // error pointer — the error is only meaningful when the call fails.
    // (Also fixed the "discription" typo in the log message.)
    if (![audioEngine startAndReturnError:&startError]) {
        NSLog(@"Error description: %@", startError.description);
        [self hideActivityIndicator];
    }
    userFeedback.text = @"Say something, I am listening!";
    NSLog(@"Say something, I am listening!");
}


// Tears down the current recording session: stops the engine, removes the
// microphone tap, and signals the recognition request/task to finish.
// BUG FIX: the original merely nilled the request/task references without
// calling -endAudio / -finish, so the recognition task was never told the
// audio stream had ended and could keep waiting for more buffers.
- (void)endRecordingAudio
{
    NSLog(@"AudioEngine stopped");
    [audioEngine stop];
    [inputNode removeTapOnBus:0];
    [recognitionRequest endAudio];   // no more audio is coming
    [speechRecognitionTask finish];  // let the task deliver its final result
    recognitionRequest = nil;
    speechRecognitionTask = nil;
}
于 2017-02-15T10:18:51.367 回答