3

我正在尝试使用 Apple 的 SFSpeechRecognition(Xcode 9、iOS 11、Swift 4)。以下代码使用计时器启动新的语音识别请求。我已在 Capabilities 中启用了后台(Background Modes)功能,已授予麦克风使用权限和语音识别权限。如果手机已解锁,一切都会按预期进行。但是当手机被锁定时,我收到以下错误:

2018-02-20 22:24:47.522562-0500 SpeechRecognitionDemo[3505:1234188] [Utility] +[AFAggregator logDictationFailedWithError:] Error Domain=kAFAssistantErrorDomain Code=1700 "(null)"

根据此链接,当应用程序处于后台模式时,语音识别似乎不起作用,但此信息已过时。我希望有人解决了这个问题或找到了解决方法。

有没有人解决了这个问题或者任何人都可以提出一些尝试?我的替代方法是为我的应用程序要求 Apple Watch,我真的很想避免这种情况......

import UIKit
import Speech

/// Demonstrates timer-driven live speech recognition: every `timerInterval`
/// seconds the timer toggles recording on/off, and recognized words are used
/// to change `colorView`'s background color.
///
/// NOTE(review): SFSpeechRecognizer does not run while the app is backgrounded
/// or the device is locked (kAFAssistantErrorDomain Code=1700); that is an
/// OS-level restriction, not a defect in this class.
class SpeechDetectionViewController: UIViewController,
SFSpeechRecognizerDelegate {

    @IBOutlet weak var detectedTextLabel: UILabel!
    @IBOutlet weak var colorView: UIView!
    @IBOutlet weak var startButton: UIButton!

    let audioEngine = AVAudioEngine()
    let speechRecognizer: SFSpeechRecognizer? = SFSpeechRecognizer()
    var request: SFSpeechAudioBufferRecognitionRequest?
    var recognitionTask: SFSpeechRecognitionTask?
    var isRecording = false

    // MARK: - Timer state

    var timer = Timer()
    let timerInterval = 5.0
    var secondsElapsed = 0

    // Toggle for taking commands (currently unused by timerAction).
    var takeCommands = true

    override func viewDidLoad() {
        super.viewDidLoad()
        self.requestSpeechAuthorization()
        // Fires every `timerInterval` seconds; each tick toggles recording
        // (see timerAction / startRecording).
        timer = Timer.scheduledTimer(timeInterval: timerInterval,
                                     target: self,
                                     selector: #selector(timerAction(timer:)),
                                     userInfo: nil,
                                     repeats: true)
    }

    /// Timer callback. `startRecording()` itself branches on `isRecording`,
    /// so calling it on every tick alternates between starting and stopping.
    @objc func timerAction(timer: Timer) {
        startRecording()
    }

    // MARK: - IBActions and Cancel

    @IBAction func startButtonTapped(_ sender: UIButton) {
        startRecording()
    }

    /// Toggles recording: finalizes the current session if one is active,
    /// otherwise starts a new one.
    func startRecording() {
        if isRecording {
            print("STOP talking.")
            request?.endAudio() // Mark end of recording so results can be finalized.
            request = nil

            // `inputNode` is non-optional on iOS 11 / Swift 4, so it must not
            // be conditionally bound with `if let` (that would not compile).
            audioEngine.inputNode.removeTap(onBus: 0)
            // Use finish() (not cancel()) after endAudio() so audio already
            // captured is still recognized instead of being discarded.
            recognitionTask?.finish()
            recognitionTask = nil

            isRecording = false
            startButton.backgroundColor = UIColor.gray
        } else {
            print("START talking.")
            self.recordAndRecognizeSpeech()
            isRecording = true
            startButton.backgroundColor = UIColor.red
        }
    }

    /// Aborts the current session immediately, discarding pending results.
    func stopRecording() {
        audioEngine.inputNode.removeTap(onBus: 0)
        recognitionTask?.cancel() // Deliberate hard cancel on this path.
        recognitionTask = nil
        isRecording = false
        startButton.backgroundColor = UIColor.gray
    }

    // MARK: - Recognize Speech

    /// Creates a new buffer recognition request, taps the microphone input to
    /// feed it, starts the audio engine if needed, and launches a recognition
    /// task whose partial results drive the UI.
    func recordAndRecognizeSpeech() {
        request = SFSpeechAudioBufferRecognitionRequest()
        let node = audioEngine.inputNode // Non-optional on iOS 11 / Swift 4.
        let recordingFormat = node.outputFormat(forBus: 0)
        node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
            // Optional chaining keeps this safe after `request` is cleared on stop.
            self.request?.append(buffer)
        }
        if !audioEngine.isRunning {
            audioEngine.prepare()
            do {
                try audioEngine.start()
            } catch {
                self.sendAlert(message: "There has been an audio engine error.")
                return print(error)
            }
        }

        // Availability checks: locale support and current availability.
        guard let myRecognizer = SFSpeechRecognizer() else {
            self.sendAlert(message: "Speech recognition is not supported for your current locale.")
            return
        }
        if !myRecognizer.isAvailable {
            self.sendAlert(message: "Speech recognition is not currently available. Check back at a later time.")
            return
        }

        guard let request = request else { return }
        recognitionTask = speechRecognizer?.recognitionTask(with: request, resultHandler: { result, error in
            // NOTE(review): the handler's thread is not guaranteed to be main;
            // consider dispatching UI updates to the main queue.
            if let result = result {
                let bestString = result.bestTranscription.formattedString
                self.detectedTextLabel.text = bestString

                // Only the most recent word matters, i.e. the suffix of the
                // transcription starting at the last segment's location.
                var lastString: String = ""
                if let segment = result.bestTranscription.segments.last {
                    let indexTo = bestString.index(bestString.startIndex, offsetBy: segment.substringRange.location)
                    // substring(from:) is deprecated in Swift 4; slice + String init instead.
                    lastString = String(bestString[indexTo...])
                }
                self.checkForColorsSaid(resultString: lastString)
            } else if let error = error {
                // BUG FIX: this branch was previously nested inside
                // `if result != nil`, making it unreachable — recognition
                // errors were silently dropped.
                self.sendAlert(message: "There has been a speech recognition error")
                print(error)
            }
        })
    }

    // MARK: - Check Authorization Status

    /// Requests speech-recognition authorization and reflects the outcome in
    /// the UI (enabling/disabling the start button) on the main queue.
    func requestSpeechAuthorization() {
        SFSpeechRecognizer.requestAuthorization { authStatus in
            OperationQueue.main.addOperation {
                switch authStatus {
                case .authorized:
                    self.startButton.isEnabled = true
                case .denied:
                    self.startButton.isEnabled = false
                    self.detectedTextLabel.text = "User denied access to speech recognition"
                case .restricted:
                    self.startButton.isEnabled = false
                    self.detectedTextLabel.text = "Speech recognition restricted on this device"
                case .notDetermined:
                    self.startButton.isEnabled = false
                    self.detectedTextLabel.text = "Speech recognition not yet authorized"
                }
            }
        }
    }

    // MARK: - UI / Set view color.

    /// Maps a recognized color word to a background color; unrecognized
    /// words are ignored.
    func checkForColorsSaid(resultString: String) {
        switch resultString {
        case "red":
            colorView.backgroundColor = UIColor.red
        case "orange":
            colorView.backgroundColor = UIColor.orange
        case "yellow":
            colorView.backgroundColor = UIColor.yellow
        case "green":
            colorView.backgroundColor = UIColor.green
        case "blue":
            colorView.backgroundColor = UIColor.blue
        case "purple":
            colorView.backgroundColor = UIColor.purple
        case "black":
            colorView.backgroundColor = UIColor.black
        case "white":
            colorView.backgroundColor = UIColor.white
        case "gray":
            colorView.backgroundColor = UIColor.gray
        default: break
        }
    }

    // MARK: - Alert

    /// Presents a simple OK-only error alert.
    func sendAlert(message: String) {
        let alert = UIAlertController(title: "Speech Recognizer Error", message: message, preferredStyle: UIAlertControllerStyle.alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.default, handler: nil))
        self.present(alert, animated: true, completion: nil)
    }
}
4

0 回答 0