I am trying to record a WebRTC stream with mediasoup. Recording works fine on Chrome and Safari, but it does not work on Firefox.

The client code is a Vue.js component that uses socket.io to fetch the router RTP capabilities and creates producers after the server has created a transport. This works well on Chrome and Safari.

<script>
const { connect, createLocalTracks } = require('twilio-video');
const SocketClient = require("socket.io-client");
const SocketPromise = require("socket.io-promise").default;
const MediasoupClient = require("mediasoup-client");

export default {
    data() {
        return {
            errors: [],
            isReady: false,
            isRecording: false,
            loading: false,
            sapio: {
                token: null,
                connectionId: 0
            },
            server: {
                host: 'https://rtc.test',
                ws: '/server',
                socket: null,
            },
            peer: {},
        }
    },
    mounted() {
        this.init();
    },
    methods: {
        async init() {
            await this.startCamera();

            if (this.takeId) {
                await this.recordBySapioServer();
            }
        },
        startCamera() {
            return new Promise( (resolve, reject) => {
                if (window.videoMediaStreamObject) {
                    this.setVideoElementStream(window.videoMediaStreamObject);
                    resolve();
                } else {
                    // Get user media as required
                    navigator.mediaDevices.getUserMedia({
                        audio: true,
                        video: true,
                    }).then((stream) => {
                        this.setVideoElementStream(stream);
                        resolve();
                    }).catch((err) => {
                        // getUserMedia() rejects asynchronously, so handle errors here
                        console.error(err);
                        reject(err);
                    });
                }
            })
        },
        setVideoElementStream(stream) {
            this.localStream = stream;
            this.$refs.video.srcObject = stream;
            this.$refs.video.muted = true;
            this.$refs.video.play().then((video) => {
                this.isStreaming = true;
                this.height = this.$refs.video.videoHeight;
                this.width = this.$refs.video.videoWidth;
            });
        },
        // first thing we need is connecting to websocket
        connectToSocket() {
            const serverUrl = this.server.host;
            console.log("Connect with sapio rtc server:", serverUrl);

            const socket = SocketClient(serverUrl, {
                path:  this.server.ws,
                transports: ["websocket"],
            });
            this.socket = socket;

            socket.on("connect", () => {
                console.log("WebSocket connected");
                // we ask for rtp-capabilities from server to send to us
                socket.emit('send-rtp-capabilities');
            });

            socket.on("error", (err) => {
                this.loading = true;
                console.error("WebSocket error:", err);
            });

            socket.on("router-rtp-capabilities", async (msg) => {
                const { routerRtpCapabilities, sessionId, externalId } = msg;
                console.log('[rtpCapabilities:%o]', routerRtpCapabilities);
                this.routerRtpCapabilities = routerRtpCapabilities;

                try {
                    const device = new MediasoupClient.Device();
                    // Load the mediasoup device with the router rtp capabilities gotten from the server
                    await device.load({ routerRtpCapabilities });

                    this.peer.sessionId = sessionId;
                    this.peer.externalId = externalId;
                    this.peer.device = device;

                    this.createTransport();
                } catch (error) {
                    console.error('failed to init device [error:%o]', error);
                    socket.disconnect();
                }
            });

            socket.on("create-transport", async (msg) => {
                console.log('handleCreateTransportRequest() [data:%o]', msg);

                try {
                    // Create the local mediasoup send transport
                    this.peer.sendTransport = await this.peer.device.createSendTransport(msg);
                    console.log('send transport created [id:%s]', this.peer.sendTransport.id);

                    // Set the transport listeners and get the users media stream
                    this.handleSendTransportListeners();
                    this.setTracks();
                    this.loading = false;
                } catch (error) {
                    console.error('failed to create transport [error:%o]', error);
                    socket.disconnect();
                }
            });

            socket.on("connect-transport", async (msg) => {
                console.log('handleTransportConnectRequest()');
                try {
                    const action = this.connectTransport;

                    if (!action) {
                        throw new Error('transport-connect action was not found');
                    }

                    await action(msg);
                } catch (error) {
                    console.error('connect-transport failed [error:%o]', error);
                }
            });

            socket.on("produce", async (msg) => {
                console.log('handleProduceRequest()');
                try {
                    if (!this.produce) {
                        throw new Error('produce action was not found');
                    }
                    await this.produce(msg);
                } catch (error) {
                    console.error('failed [error:%o]', error);
                }
            });

            socket.on("recording", async (msg) => {
                this.isRecording = true;
            });

            socket.on("recording-error", async (msg) => {
                this.isRecording = false;
                console.error(msg);
            });

            socket.on("recording-closed", async (msg) => {
                this.isRecording = false;
                console.warn(msg)
            });

        },
        createTransport() {
            console.log('createTransport()');

            if (!this.peer || !this.peer.device.loaded) {
                throw new Error('Peer or device is not initialized');
            }

            // First we must create the mediasoup transport on the server side
            this.socket.emit('create-transport',{
                sessionId: this.peer.sessionId
            });
        },
        handleSendTransportListeners() {
            this.peer.sendTransport.on('connect', this.handleTransportConnectEvent);
            this.peer.sendTransport.on('produce', this.handleTransportProduceEvent);
            this.peer.sendTransport.on('connectionstatechange', connectionState => {
                console.log('send transport connection state change [state:%s]', connectionState);
            });
        },
        handleTransportConnectEvent({ dtlsParameters }, callback, errback) {
            console.log('handleTransportConnectEvent()');
            try {
                this.connectTransport = (msg) => {
                    console.log('connect-transport action');
                    callback();
                    this.connectTransport = null;
                };

                this.socket.emit('connect-transport',{
                    sessionId: this.peer.sessionId,
                    transportId: this.peer.sendTransport.id,
                    dtlsParameters
                });

            } catch (error) {
                console.error('handleTransportConnectEvent() failed [error:%o]', error);
                errback(error);
            }
        },
        handleTransportProduceEvent({ kind, rtpParameters }, callback, errback)  {
            console.log('handleTransportProduceEvent()');
            try {
                this.produce = jsonMessage => {
                    console.log('handleTransportProduceEvent callback [data:%o]', jsonMessage);
                    callback({ id: jsonMessage.id });
                    this.produce = null;
                };

                this.socket.emit('produce', {
                    sessionId: this.peer.sessionId,
                    transportId: this.peer.sendTransport.id,
                    kind,
                    rtpParameters
                });
            } catch (error) {
                console.error('handleTransportProduceEvent() failed [error:%o]', error);
                errback(error);
            }
        },
        async recordBySapioServer() {
            this.loading = true;
            this.connectToSocket();
        },
        async setTracks() {
            // Start mediasoup-client's WebRTC producers
            const audioTrack = this.localStream.getAudioTracks()[0];
            this.peer.audioProducer = await this.peer.sendTransport.produce({
                track: audioTrack,
                codecOptions: {
                    opusStereo: 1,
                    opusDtx: 1
                }
            });


            let encodings;
            let codec;
            const codecOptions = {videoGoogleStartBitrate : 1000};

            codec = this.peer.device.rtpCapabilities.codecs.find((c) => c.kind.toLowerCase() === 'video');
            if (codec.mimeType.toLowerCase() === 'video/vp9') {
                // produce() expects encodings to be an array
                encodings = [{ scalabilityMode: 'S3T3_KEY' }];
            } else {
                encodings = [
                    { scaleResolutionDownBy: 4, maxBitrate: 500000 },
                    { scaleResolutionDownBy: 2, maxBitrate: 1000000 },
                    { scaleResolutionDownBy: 1, maxBitrate: 5000000 }
                ];
            }
            const videoTrack = this.localStream.getVideoTracks()[0];
            this.peer.videoProducer = await this.peer.sendTransport.produce({
                track: videoTrack,
                encodings,
                codecOptions,
                codec
            });

        },
        startRecording() {
            this.Q.answer.recordingId = this.peer.externalId;
            this.socket.emit("start-record", {
                sessionId: this.peer.sessionId
            });
        },
        stopRecording() {
            this.socket.emit("stop-record" , {
                sessionId: this.peer.sessionId
            });
        },
    },

}
</script>

<style scoped>
.video-recorder_wrapper {
    position: relative;
    display: flex;
}
.video-camera {
    margin: 0;
    height: auto;
    width: auto;
    max-height: 350px;
    max-width: 100%;
    border-radius: 3px;
}
@media screen and (max-width: 600px) {
    .video-camera {
        width: calc(100% - 20px);
        max-height: 600px;
    }
}
</style>

The console.log output from my ffmpeg process:

// sdp string
[sdpString:v=0
  o=- 0 0 IN IP4 127.0.0.1
  s=FFmpeg
  c=IN IP4 127.0.0.1
  t=0 0
  m=video 25549 RTP/AVP 101 
  a=rtpmap:101 VP8/90000
  a=sendonly
  m=audio 26934 RTP/AVP 100 
  a=rtpmap:100 opus/48000/2
  a=sendonly
  ]

// ffmpeg args
commandArgs:[
  '-loglevel',
  'debug',
  '-protocol_whitelist',
  'pipe,udp,rtp',
  '-fflags',
  '+genpts',
  '-f',
  'sdp',
  '-i',
  'pipe:0',
  '-map',
  '0:v:0',
  '-c:v',
  'copy',
  '-map',
  '0:a:0',
  '-strict',
  '-2',
  '-c:a',
  'copy',
  '-f',
  'webm',
  '-flags',
  '+global_header',
  '-y',
  'storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm',
  [length]: 26
]
// ffmpeg log
ffmpeg::process::data [data:'ffmpeg version n4.4']
ffmpeg::process::data [data:' Copyright (c) 2000-2021 the FFmpeg developers']
ffmpeg::process::data [data:'\n']
ffmpeg::process::data [data:'  built with gcc 11.1.0 (GCC)\n']
ffmpeg::process::data [data:'  configuration: --prefix=/usr --disable-debug --disable-static --disable-stripping --enable-amf --enable-avisynth --enable-cuda-llvm --enable-lto --enable-fontconfig --enable-gmp --enable-gnutls --enable-gpl --enable-ladspa --enable-libaom --enable-libass --enable-libbluray --enable-libdav1d --enable-libdrm --enable-libfreetype --enable-libfribidi --enable-libgsm --enable-libiec61883 --enable-libjack --enable-libmfx --enable-libmodplug --enable-libmp3lame --enable-libopencore_amrnb --enable-libopencore_amrwb --enable-libopenjpeg --enable-libopus --enable-libpulse --enable-librav1e --enable-librsvg --enable-libsoxr --enable-libspeex --enable-libsrt --enable-libssh --enable-libsvtav1 --enable-libtheora --enable-libv4l2 --enable-libvidstab --enable-libvmaf --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxcb --enable-libxml2 --enable-libxvid --enable-libzimg --enable-nvdec --enable-nvenc --enable-shared --enable-version3\n']
ffmpeg::process::data [data:'  libavutil      56. 70.100 / 56. 70.100\n' +
  '  libavcodec     58.134.100 / 58.134.100\n' +
  '  libavformat    58. 76.100 / 58. 76.100\n' +
  '  libavdevice    58. 13.100 / 58. 13.100\n' +
  '  libavfilter     7.110.100 /  7.110.100\n' +
  '  libswscale      5.  9.100 /  5.  9.100\n' +
  '  libswresample   3.  9.100 /  3.  9.100\n' +
  '  libpostproc    55.  9.100 / 55.  9.100\n' +
  'Splitting the commandline.\n' +
  "Reading option '-loglevel' ... matched as option 'loglevel' (set logging level) with argument 'debug'.\n" +
  "Reading option '-protocol_whitelist' ..."]
ffmpeg::process::data [data:" matched as AVOption 'protocol_whitelist' with argument 'pipe,udp,rtp'.\n" +
  "Reading option '-fflags' ..."]
ffmpeg::process::data [data:" matched as AVOption 'fflags' with argument '+genpts'.\n" +
  "Reading option '-f' ... matched as option 'f' (force format) with argument 'sdp'.\n" +
  "Reading option '-i' ... matched as input url with argument 'pipe:0'.\n" +
  "Reading option '-map' ... matched as option 'map' (set input stream mapping) with argument '0:v:0'.\n" +
  "Reading option '-c:v' ... matched as option 'c' (codec name) with argument 'copy'.\n" +
  "Reading option '-map' ... matched as option 'map' (set input stream mapping) with argument '0:a:0'.\n" +
  "Reading option '-strict' ...Routing option strict to both codec and muxer layer\n" +
  " matched as AVOption 'strict' with argument '-2'.\n" +
  "Reading option '-c:a' ... matched as option 'c' (codec name) with argument 'copy'.\n" +
  "Reading option '-f' ... matched as option 'f' (force format) with argument 'webm'.\n" +
  "Reading option '-flags' ... matched as AVOption 'flags' with argument '+global_header'.\n" +
  "Reading option '-y' ... matched as option 'y' (overwrite output files) with argument '1'.\n" +
  "Reading option 'storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm' ... matched as output url.\n" +
  'Finished splitting the commandline.\n' +
  'Parsing a group of options: global .\n' +
  'Applying option loglevel (set logging level) with argument debug.\n' +
  'Applying option y (overwrite output files) with argument 1.\n' +
  'Successfully parsed a group of options.\n' +
  'Parsing a group of options: input url pipe:0.\n' +
  'Applying option f (force format) with argument sdp.\n' +
  'Successfully parsed a group of options.\n' +
  'Opening an input file: pipe:0.\n' +
  "[sdp @ 0x55604dc58400] Opening 'pipe:0' for reading\n" +
  '[sdp @ 0x55604dc58400] video codec set to: vp8\n' +
  '[sdp @ 0x55604dc58400] audio codec set to: opus\n' +
  '[sdp @ 0x55604dc58400] audio samplerate set to: 48000\n' +
  '[sdp @ 0x55604dc58400] audio channels set to: 2\n' +
  '[udp @ 0x55604dc6c500] end receive buffer size reported is 425984\n' +
  '[udp @ 0x55604dc6c7c0] end receive buffer size reported is 425984\n' +
  '[sdp @ 0x55604dc58400] setting jitter buffer size to 500\n' +
  '[udp @ 0x55604dc6d900] end receive buffer size reported is 425984\n' +
  '[udp @ 0x55604dc6d2c0] end receive buffer size reported is 425984\n' +
  '[sdp @ 0x55604dc58400] setting jitter buffer size to 500\n']
ffmpeg::process::data [data:'[sdp @ 0x55604dc58400] Before avformat_find_stream_info() pos: 210 bytes read:210 seeks:0 nb_streams:2\n']
  **mediasoup:Consumer resume() +1s**
  **mediasoup:Channel request() [method:consumer.resume, id:12] +1s**
  **mediasoup:Channel request succeeded [method:consumer.resume, id:12] +0ms**
  **mediasoup:Consumer resume() +1ms**
  **mediasoup:Channel request() [method:consumer.resume, id:13] +0ms**
  **mediasoup:Channel request succeeded [method:consumer.resume, id:13] +0ms**
ffmpeg::process::data [data:'[sdp @ 0x55604dc58400] Could not find codec parameters for stream 0 (Video: vp8, 1 reference frame, yuv420p): unspecified size\n' +
  "Consider increasing the value for the 'analyzeduration' (0) and 'probesize' (5000000) options\n"]
ffmpeg::process::data [data:'[sdp @ 0x55604dc58400] After avformat_find_stream_info() pos: 210 bytes read:210 seeks:0 frames:0\n' +
  "Input #0, sdp, from 'pipe:0':\n" +
  '  Metadata:\n' +
  '    title           : FFmpeg\n' +
  '  Duration: N/A, bitrate: N/A\n' +
  '  Stream #0:0, 0, 1/90000: Video: vp8, 1 reference frame, yuv420p, 90k tbr, 90k tbn, 90k tbc\n' +
  '  Stream #0:1, 0, 1/48000: Audio: opus, 48000 Hz, stereo, fltp\n' +
  'Successfully opened the file.\n' +
  'Parsing a group of options: output url storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm.\n' +
  'Applying option map (set input stream mapping) with argument 0:v:0.\n' +
  'Applying option c:v (codec name) with argument copy.\n' +
  'Applying option map (set input stream mapping) with argument 0:a:0.\n' +
  'Applying option c:a (codec name) with argument copy.\n' +
  'Applying option f (force format) with argument webm.\n' +
  'Successfully parsed a group of options.\n' +
  'Opening an output file: storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm.\n' +
  "[file @ 0x55604dce5bc0] Setting default whitelist 'file,crypto,data'\n"]
ffmpeg::process::data [data:'Successfully opened the file.\n' +
  '[webm @ 0x55604dce0fc0] dimensions not set\n' +
  'Could not write header for output file #0 (incorrect codec parameters ?): Invalid argument\n' +
  'Error initializing output stream 0:1 -- \n' +
  'Stream mapping:\n' +
  '  Stream #0:0 -> #0:0 (copy)\n' +
  '  Stream #0:1 -> #0:1 (copy)\n' +
  '    Last message repeated 1 times\n' +
  '[AVIOContext @ 0x55604dc6dcc0] Statistics: 0 seeks, 0 writeouts\n' +
  '[AVIOContext @ 0x55604dc69380] Statistics: 210 bytes read, 0 seeks\n']
ffmpeg::process::close

When I use Firefox, FFmpeg reports "dimensions not set" and "Could not write header for output file". That is probably enough to understand the problem, but if you need more information you can read how the server side works. To summarize, the server side looks like this. Assume that at runtime we initialized the mediasoup workers and the router with the following functions.

    // Start the mediasoup workers
module.exports.initializeWorkers = async () => {
  const { logLevel, logTags, rtcMinPort, rtcMaxPort } = config.worker;

  console.log('initializeWorkers() creating %d mediasoup workers', config.numWorkers);

  for (let i = 0; i < config.numWorkers; ++i) {
    const worker = await mediasoup.createWorker({
      logLevel, logTags, rtcMinPort, rtcMaxPort
    });

    worker.once('died', () => {
      console.error('worker::died worker has died exiting in 2 seconds... [pid:%d]', worker.pid);
      setTimeout(() => process.exit(1), 2000);
    });

    workers.push(worker);
  }
};
module.exports.createRouter = async () => {
  const worker = getNextWorker();

  console.log('createRouter() creating new router [worker.pid:%d]', worker.pid);

  console.log(`config.router.mediaCodecs:${JSON.stringify(config.router.mediaCodecs)}`)

  return await worker.createRouter({ mediaCodecs: config.router.mediaCodecs });
};
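
getNextWorker() is referenced above but not shown; a minimal round-robin sketch of what it might look like (the workers array is the one filled by initializeWorkers(), the index variable is an assumption):

// Hypothetical round-robin selection over the workers created above.
let nextWorkerIndex = 0;

const getNextWorker = () => {
  const worker = workers[nextWorkerIndex];
  nextWorkerIndex = (nextWorkerIndex + 1) % workers.length;
  return worker;
};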

We pass router.rtpCapabilities to the client. The client receives the RTP capabilities, creates a device, and loads it. After that, the transport has to be created on the server side.
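
The handler that answers the client's 'send-rtp-capabilities' request is not shown; a minimal sketch of what it could look like, assuming the peers bookkeeping used elsewhere (only the emitted payload shape is taken from the client code):

// Hypothetical handler for the client's 'send-rtp-capabilities' request;
// it only needs to emit the payload the client destructures in its
// 'router-rtp-capabilities' listener: routerRtpCapabilities, sessionId, externalId.
const handleSendRtpCapabilitiesRequest = (peer) => {
  peer.socket.emit('router-rtp-capabilities', {
    routerRtpCapabilities: router.rtpCapabilities,
    sessionId: peer.sessionId,
    externalId: peer.externalId
  });
};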

    const handleCreateTransportRequest = async (jsonMessage) => {

  const transport = await createTransport('webRtc', router);

  // Map.get() does not throw, so check the result instead of wrapping it in try/catch
  const peer = peers.get(jsonMessage.sessionId);

  if (!peer) {
    throw new Error(`Peer with id ${jsonMessage.sessionId} was not found`);
  }

  peer.addTransport(transport);

  peer.socket.emit('create-transport',{
    id: transport.id,
    iceParameters: transport.iceParameters,
    iceCandidates: transport.iceCandidates,
    dtlsParameters: transport.dtlsParameters
  });
};

Then, after the transport has also been created on the client side, we listen for the transport's connect event; when it fires, we ask the server to connect the transport.

const handleTransportConnectRequest = async (jsonMessage) => {
  const peer = peers.get(jsonMessage.sessionId);

  if (!peer) {
    throw new Error(`Peer with id ${jsonMessage.sessionId} was not found`);
  }

  const transport = peer.getTransport(jsonMessage.transportId);

  if (!transport) {
    throw new Error(`Transport with id ${jsonMessage.transportId} was not found`);
  }

  await transport.connect({ dtlsParameters: jsonMessage.dtlsParameters });
  console.log('handleTransportConnectRequest() transport connected');
  peer.socket.emit('connect-transport');
};

Something similar happens for the produce event.

const handleProduceRequest = async (jsonMessage) => {
  console.log('handleProduceRequest [data:%o]', jsonMessage);

  const peer = peers.get(jsonMessage.sessionId);

  if (!peer) {
    throw new Error(`Peer with id ${jsonMessage.sessionId} was not found`);
  }

  const transport = peer.getTransport(jsonMessage.transportId);

  if (!transport) {
    throw new Error(`Transport with id ${jsonMessage.transportId} was not found`);
  }

  const producer = await transport.produce({
    kind: jsonMessage.kind,
    rtpParameters: jsonMessage.rtpParameters
  });

  peer.addProducer(producer);

  console.log('handleProduceRequest() new producer added [id:%s, kind:%s]', producer.id, producer.kind);

  peer.socket.emit('produce',{
    id: producer.id,
    kind: producer.kind
  });
};
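
The Peer class these handlers rely on (addTransport, getTransport, addProducer, plus the socket reference) is not shown; a minimal sketch of the shape they assume (the real class presumably tracks more state, such as consumers):

// Minimal sketch of the Peer shape assumed by the handlers above.
class Peer {
  constructor(sessionId, socket) {
    this.sessionId = sessionId;
    this.socket = socket;
    this.transports = new Map();
    this.producers = new Map();
  }

  addTransport(transport) {
    this.transports.set(transport.id, transport);
  }

  getTransport(transportId) {
    return this.transports.get(transportId);
  }

  addProducer(producer) {
    this.producers.set(producer.id, producer);
  }
}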

For recording, I first create plain transports for the audio and video producers.

const rtpTransport = await router.createPlainTransport(config.plainRtpTransport);
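
config.plainRtpTransport is not shown; for a local recording setup with mediasoup v3 it would typically look something like this (the exact values are assumptions):

// Assumed shape of config.plainRtpTransport (mediasoup v3 PlainTransportOptions).
// rtcpMux: false matches the separate rtcpPort passed to connect() below, and
// comedia is off because we connect() to ffmpeg's ports explicitly.
const config = {
  plainRtpTransport: {
    listenIp: { ip: '127.0.0.1', announcedIp: null },
    rtcpMux: false,
    comedia: false
  }
};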

Then the plain transport has to be connected to the ports ffmpeg will receive RTP on:

  await rtpTransport.connect({
    ip: '127.0.0.1',
    port: remoteRtpPort,
    rtcpPort: remoteRtcpPort
  });
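
Where remoteRtpPort and remoteRtcpPort come from is not shown; they have to match the ports written into the SDP that ffmpeg reads. A hypothetical helper for reserving them could look like this:

// Hypothetical UDP port reservation; the real project presumably picks free
// ports from a configured range and reuses the same values in the SDP.
const MIN_PORT = 20000;
const MAX_PORT = 30000;
const takenPorts = new Set();

const getNextFreePort = () => {
  let port;
  do {
    port = MIN_PORT + Math.floor(Math.random() * (MAX_PORT - MIN_PORT));
  } while (takenPorts.has(port));
  takenPorts.add(port);
  return port;
};

const remoteRtpPort = getNextFreePort();
const remoteRtcpPort = getNextFreePort();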

Then a consumer also has to be created.

  const rtpConsumer = await rtpTransport.consume({
    producerId: producer.id,
    rtpCapabilities,
    paused: true
  });
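
The consumer is created paused; the mediasoup debug lines interleaved with the ffmpeg log above show consumer.resume() being called, so presumably the consumers are resumed once ffmpeg is running. A hedged sketch of that step (peer.consumers and the timing are assumptions; requestKeyFrame() is a real mediasoup Consumer method, but calling it here is my addition, not something shown in the question):

// Resume the paused consumers once ffmpeg has started reading from the plain
// transport; `peer.consumers` is an assumed collection of the consumers created
// above, and the keyframe request (assumption) gives the recorder decodable
// video from the start.
setTimeout(async () => {
  for (const consumer of peer.consumers) {
    await consumer.resume();
    if (consumer.kind === 'video') {
      await consumer.requestKeyFrame();
    }
  }
}, 1000);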

After that, we can start recording with the following code:

    this._rtpParameters = args;
    this._process = undefined;
    this._observer = new EventEmitter();
    this._peer = args.peer;

    this._sdpString = createSdpText(this._rtpParameters);
    this._sdpStream = convertStringToStream(this._sdpString);
    // create dir
    const dir = process.env.REOCRDING_PATH ?? 'storage/recordings';
    if (!fs.existsSync(dir)) shelljs.mkdir('-p', dir);
  
    this._extension = 'webm';
    // create file path
    this._path = `${dir}/${args.peer.sessionId}.${this._extension}`
    let loop = 0;
    while(fs.existsSync(this._path)) {
      this._path = `${dir}/${args.peer.sessionId}-${++loop}.${this._extension}`
    }

    this._recordingnModel = await Recording.findOne({ sessionIds: { $in: [this._peer.sessionId] } });
    this._recordingnModel.files.push(this._path);
    this._recordingnModel.save();

    let proc = ffmpeg(this._sdpStream)
    .inputOptions([
      '-protocol_whitelist','pipe,udp,rtp',
      '-f','sdp',
    ])
    .format(this._extension)
    .output(this._path)
    .size('720x?')
    .on('start', ()=>{
      this._peer.socket.emit('recording');
    })
    .on('end', ()=>{
      let path = this._path.replace('storage/recordings/', '');
      this._peer.socket.emit('recording-closed', {
        url: `${process.env.APP_URL}/recording/file/${path}`
      });
    });

    proc.run();
    this._process = proc;
  }
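
createSdpText() and convertStringToStream() are referenced above but not shown; based on the SDP string printed in the ffmpeg log, they presumably look roughly like this (the field names on the RTP parameters object are assumptions):

const { Readable } = require('stream');

// Rough reconstruction of the helpers used above, matching the SDP printed in
// the ffmpeg log; remoteRtpPort / payloadType / codecName etc. are assumed names.
const createSdpText = (rtpParameters) => {
  const { video, audio } = rtpParameters;
  return `v=0
o=- 0 0 IN IP4 127.0.0.1
s=FFmpeg
c=IN IP4 127.0.0.1
t=0 0
m=video ${video.remoteRtpPort} RTP/AVP ${video.payloadType}
a=rtpmap:${video.payloadType} ${video.codecName}/${video.clockRate}
a=sendonly
m=audio ${audio.remoteRtpPort} RTP/AVP ${audio.payloadType}
a=rtpmap:${audio.payloadType} ${audio.codecName}/${audio.clockRate}/${audio.channels}
a=sendonly
`;
};

// Turn the SDP string into a readable stream so it can be piped to ffmpeg's stdin.
const convertStringToStream = (stringToConvert) => {
  const stream = new Readable({ read() {} });
  stream.push(stringToConvert);
  stream.push(null);
  return stream;
};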
