
I'm developing a web app that uses an AudioWorklet to record audio. See the example here: https://engagelab.uio.no/oslospell/#/debug

I believe I clear all buffers between recordings (this.audioData in the main thread), but somehow, on Chrome/PC, each new recording starts with roughly 0.5 seconds of audio from the end of the previous recording. Correspondingly, the last 0.5 seconds of a take is not recorded; it shows up at the start of the next one instead. This does not happen in Safari on iPad, where everything works as expected.

To reproduce:

  1. Go to https://engagelab.uio.no/oslospell/#/debug
  2. Click "Start audio".
  3. Start making a steady sound, click "Start recording", then click "Stop recording".
  4. Click "Play test". You should hear the sound.
  5. Make a new recording ("Start recording", then "Stop recording"). Now, when you click "Play test", you should hear the steady sound from the first recording during the first ~0.5 seconds of the new one. The recording also stops a bit too early.

Why does this happen, and how can I fix it?

For reference: I had big problems with latency/desync in my recordings on Safari/iPad, but those were completely resolved after I started disconnecting and reconnecting the audio graph between recordings:

//On every stop recording:
this.micSource.disconnect(this.recorderNode);
this.recorderNode.disconnect(this.audioContext.destination);

//On every start recording:
this.micSource.connect(this.recorderNode);
this.recorderNode.connect(this.audioContext.destination);
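
One caveat: disconnect() with an explicit target throws an InvalidAccessError if that connection does not exist, so if the stop path can ever run twice, a defensive variant would look roughly like this (a sketch, not the app's exact code):

//On every stop recording (defensive variant):
try {
  this.micSource.disconnect(this.recorderNode);
  this.recorderNode.disconnect(this.audioContext.destination);
} catch (e) {
  console.warn("already disconnected:", e); //safe to ignore during teardown
}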

The AudioWorkletProcessor code:

/*
recorderWorkletProcessor2.js
Based on https://gist.github.com/theroman/155d07f9616f5b9a28c028376a247d24
*/

class RecorderWorkletProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this._stopTime = 999999999;
    this._startTime = 0;
    this._stopRecording = true;
    this.port.onmessage = (e) => {
      const data = e.data;
      if (data.eventType == "stopRecording") {
        this._stopTime = data.stopTime;
        this._stopRecording = true;
      }

      if (data.eventType == "startRecording") {
        this._startTime = data.startTime;
        this._stopRecording = false;
      }
    };
    this._bufferSize = 2048;
    this._buffers = null;
    this._initBuffer();
    this._initBuffers(1); //numberOfChannels
  }

  _initBuffers(numberOfChannels) {
    this._buffers = [];
    for (let channel = 0; channel < numberOfChannels; channel++) {
      this._buffers.push(new Float32Array(this._bufferSize));
    }
  }

  _initBuffer() {
    this._bytesWritten = 0;
  }

  _isBufferEmpty() {
    return this._bytesWritten === 0;
  }

  _isBufferFull() {
    return this._bytesWritten === this._bufferSize;
  }

  _pushToBuffers(audioRawData, numberOfChannels) {
    if (this._isBufferFull()) {
      this._flush();
    }

    let dataLength = audioRawData[0].length;

    for (let idx = 0; idx < dataLength; idx++) {
      for (let channel = 0; channel < numberOfChannels; channel++) {
        let value = audioRawData[channel][idx];
        this._buffers[channel][this._bytesWritten] = value;
      }
      this._bytesWritten += 1;
    }
  }

  _flush() {
    let buffers = [];
    this._buffers.forEach((buffer, channel) => {
      if (this._bytesWritten < this._bufferSize) {
        buffer = buffer.slice(0, this._bytesWritten);
      }
      buffers[channel] = buffer;
    });
    this.port.postMessage({
      eventType: "data",
      audioBuffer: buffers,
      bufferSize: this._bufferSize,
    });
    this._initBuffer();
    this._initBuffers(1);
  }

  _recordingStopped() {
    this.port.postMessage({
      eventType: "stop",
    });
  }

  process(inputs, outputs, parameters) {
    if (inputs[0] == null || inputs[0].length === 0) {
      console.log("FROM WORKLET: no input");
      return true; //keep the processor alive while there is no input
    }
    if (this._buffers === null) {
      this._initBuffers(1);
      this._initBuffer();
    }

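    // Note: `currentTime` below is the AudioWorkletGlobalScope global, i.e. the
    // context's current audio time in seconds at the start of this render quantum.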
    if (
      this._stopRecording &&
      !this._isBufferEmpty() &&
      currentTime > this._stopTime
    ) {
      this._flush();
      this._recordingStopped();
    } else if (!this._stopRecording && currentTime > this._startTime) {
      this._pushToBuffers(inputs[0], 1); //data, numberOfChannels
    }
    return true;
  }
}

registerProcessor("recorder-worklet-2", RecorderWorkletProcessor);
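
For scale, a back-of-the-envelope sketch of how the 2048-sample buffer relates to the misplaced ~0.5 s (assuming a 48 kHz context, which is what the main-thread code below assumes for non-Mac devices):

const renderQuantum = 128; //frames per process() call (the Web Audio default)
const bufferSize = 2048; //matches this._bufferSize above
const sampleRate = 48000; //assumed for this sketch

console.log(bufferSize / renderQuantum); //16 process() calls per flush
console.log((bufferSize / sampleRate) * 1000); //≈42.7 ms of audio per "data" message
console.log((0.5 * sampleRate) / bufferSize); //≈11.7 buffers in the misplaced 0.5 s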

The relevant main-thread code (it's a Vue app):

    //Global variables (this.)
      audioContext: null as any,
      recorderNode: null as any,
      micSource: null as any,
      audioData: [] as any,
      sampleRate: 0,
      numChannels: 1,


    //Methods:

    stopRecorderWorklet() {
      this.recorderNode.port.postMessage({
        eventType: "stopRecording",
        stopTime: this.audioContext.currentTime, //the worklet reads data.stopTime
      });
    },

    startRecorderWorklet() {
      this.micSource.connect(this.recorderNode);
      this.recorderNode.connect(this.audioContext.destination);
      console.log("start Recorder Worklet()");
      this.recorderNode.port.postMessage({
        eventType: "startRecording",
        startTime: this.audioContext.currentTime, //the worklet reads data.startTime
      });
    },

    async startAudio() {
      console.log("Initializing audio");
      //Find sample rate
      const deviceDetector = new DeviceDetector();
      const device = deviceDetector.parse(navigator.userAgent);
      let sampleRate;
      if (device.os?.name == "Mac") {
        sampleRate = 44100;
      } else {
        sampleRate = 48000;
      }
      this.sampleRate = sampleRate;
      //Get mic
      const constraints = { audio: true };
      const micStream = await navigator.mediaDevices.getUserMedia(constraints);
      this.handleSuccess(micStream);
    },

     async handleSuccess(micStream) {
      // Default || Safari and old versions of Chrome
      const AudioContext =
        window.AudioContext || (window as any).webkitAudioContext; //eslint-disable-line
      const audioContext = new AudioContext();
      this.audioContext = audioContext;
      this.micSource = audioContext.createMediaStreamSource(micStream);

      //Register the worklet
      try {
        await audioContext.audioWorklet.addModule(
          "worklet/recorderWorkletProcessor2.js"
        );
      } catch (error) {
        console.error("Error register the worklet", error);
        alert("Error register the worklet: " + error);
        return;
      }

      // Create worklet
      const recorderNode = new window.AudioWorkletNode(
        this.audioContext,
        "recorder-worklet-2",
        {
          channelCount: 1,
          channelCountMode: "explicit",
          channelInterpretation: "discrete",
        }
      );
      this.recorderNode = recorderNode;

      // Connect your source
      this.micSource.connect(recorderNode);
      recorderNode.connect(this.audioContext.destination);

      // Register worklet events
      recorderNode.port.onmessage = (e) => {
        const data = e.data;
        if (data.eventType == "startedRecording") {
          this.recStart = data.ts;
          this.debug = "startedRecording";
        } else if (data.eventType == "stop") {
          console.log("RECORDING STOPPED");
          this.recorderNode.disconnect(this.audioContext.destination);
          this.micSource.disconnect(this.recorderNode);

          // recording has stopped
          // process pcm data; encode etc

          //FLATTEN ARRAY
          const float32Flatten = (chunks) => {
            //get the total number of frames on the new float32array
            const nFrames = chunks.reduce((acc, elem) => acc + elem.length, 0);

            //create a new float32 with the correct number of frames
            const result = new Float32Array(nFrames);

            //insert each chunk into the new float32array
            let currentFrame = 0;
            chunks.forEach((chunk) => {
              result.set(chunk, currentFrame);
              currentFrame += chunk.length;
            });
            return result;
          };

          const audioBuffer = float32Flatten(this.audioData);

          //Make object for Wavencoder
          const wavObj = {
            sampleRate: this.sampleRate,
            channelData: [audioBuffer],
          };

          //Run WavEncoder
          const wavOutput = WavEncoder.encode.sync(wavObj);
          console.log("wavOutput", wavOutput);

          //Reset buffer
          this.audioData = [];

          //Prepare for playback
          const blob = new Blob([wavOutput], { type: "audio/wav" });
          const url = window.URL.createObjectURL(blob);
          this.testAudio = url;
        } else if (data.eventType == "data") {
          console.log("DATA RECIEVED");
          this.audioData.push(data.audioBuffer[0]); //channel 0
        }
      };
    },
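
For completeness, since I suspect stale buffers: a minimal sanity check (hypothetical, not in the app yet) would be to log the chunk count whenever a new take starts, to confirm this.audioData really is empty:

//Hypothetical check, e.g. at the top of startRecorderWorklet():
console.log("stale chunks at start:", this.audioData.length); //expected: 0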

