2

我以这种方式在客户端录制 MediaStream:

handleStream(stream) {
    const ws = new WebSocket('ws://localhost:5432/binary');
    var recorder = new MediaRecorder(stream);
    recorder.ondataavailable = function(event) {
        ws.send(event.data);
    };
    recorder.start();
}

这些数据在服务器端被接受,如下所示:

// Accept incoming recorder chunks over WebSocket and persist each to disk.
const wss = new WebSocket.Server({ port: 5432 });
wss.on('connection', (ws) => {
    ws.on('message', (message) => {
        writeToDisk(message, 'video.webm');
    });
});

/**
 * Append a received media chunk to a file on disk.
 *
 * Fixes in this version:
 *  - `new Buffer(...)` is deprecated and unsafe; replaced with
 *    `Buffer.isBuffer` / `Buffer.from`.
 *  - The `ws` library delivers binary messages as a Buffer already, so
 *    base64 decoding only applies when a string is actually passed.
 *  - `fs.writeFileSync` overwrote the file on every chunk, keeping only the
 *    last one; `fs.appendFileSync` accumulates chunks into one WebM file.
 *
 * @param {Buffer|string} dataURL - binary chunk, or a base64-encoded string
 * @param {string} fileName - path of the output file
 */
function writeToDisk(dataURL, fileName) {
    const fileBuffer = Buffer.isBuffer(dataURL)
        ? dataURL
        : Buffer.from(dataURL, 'base64');
    fs.appendFileSync(fileName, fileBuffer);
}

这套方案运行得很好,但我想利用这些缓冲区数据,在服务器端提供实时的视频直播流。有什么办法可以做到吗?

谢谢你的帮助。

4

1 回答 1

0

我已经在这里完成了。

(此处为原回答中附带的效果示意图)

您可以使用 MediaRecorder 类将视频分割成块并将它们发送到服务器进行广播。

// Split the recording into chunks and push each one to the streaming server.
this._mediaRecorder = new MediaRecorder(this._stream, this._streamOptions);
this._mediaRecorder.ondataavailable = e => this._videoStreamer.pushChunk(e.data);
this._mediaRecorder.start();
...
// Flush the currently buffered data as a chunk (fires ondataavailable).
this._mediaRecorder.requestData()

不要忘记每隔一段时间重新开始录制,以免新接入的客户端为了连上直播流而不得不下载全部历史视频。另外,在切换 chunk 的过程中,您应该替换或更新 `<video>` 元素的海报(poster)图像,使拼接过程看起来平滑。

/**
 * Draw an ImageBitmap onto an off-screen canvas and encode it as a Blob.
 * @param {ImageBitmap} img - frame to encode
 * @returns {Promise<Blob>} resolves with the encoded image blob
 */
async function imageBitmapToBlob(img) {
    const canvas = document.createElement('canvas');
    canvas.width = img.width;
    canvas.height = img.height;
    const context = canvas.getContext('2d');
    context.drawImage(img, 0, 0);
    return new Promise((resolve) => {
        canvas.toBlob(resolve);
    });
}

...

// Capture the current frame of the playing <video> to use as its poster.
const stream = document.querySelector('video').captureStream();

if(stream.active==true) {

    // Grab a single frame from the video track and encode it as a Blob.
    const track = stream.getVideoTracks()[0];
    const capturer = new ImageCapture(track);
    const bitmap = await imageBitmapToBlob(await capturer.grabFrame());

    // Release the previous poster's object URL before replacing it,
    // otherwise each capture leaks memory.
    URL.revokeObjectURL(this._oldPosterUrl);
    this._video.poster = this._oldPosterUrl = URL.createObjectURL(bitmap);
    track.stop();
}

您可以通过 Blob 构造函数将多个 Blob 对象拼接在一起。在获取新 chunk 的过程中,不要忘记调用 `URL.revokeObjectURL()` 释放旧视频对象 URL 占用的内存,并更新视频的当前播放时间(currentTime)。

// Rebuild the <video> source from the accumulated chunks.
// When `newBlob` is true, the recording was restarted on the server:
// only the newest chunk is used and the backlog is collapsed into one blob.
_updateVideo = async (newBlob = false) => {

    const stream = this._video.captureStream();

    if(stream.active==true) {

        // Snapshot the current frame and set it as the poster so the
        // source swap below doesn't visibly flash.
        const track = stream.getVideoTracks()[0];
        const capturer = new ImageCapture(track);
        const bitmap = await imageBitmapToBlob(await capturer.grabFrame());

        // Free the previous poster URL before assigning a new one.
        URL.revokeObjectURL(this._oldPosterUrl);
        this._video.poster = this._oldPosterUrl = URL.createObjectURL(bitmap);
        track.stop();
    }

    // Choose which chunks make up the new source.
    let data = null;
    if(newBlob === true) {
        // Recording restarted: keep only the latest chunk (contains a header).
        const index = this._recordedChunks.length - 1;
        data = [this._recordedChunks[index]];
    } else {
        data = this._recordedChunks;
    }

    const blob = new Blob(data, this._options);
    // Remember playback position so the swap is seamless.
    const time = this._video.currentTime;

    // Free the previous video object URL before creating the replacement.
    URL.revokeObjectURL(this._oldVideoUrl);
    const url = this._oldVideoUrl = URL.createObjectURL(blob);

    if(newBlob === true) {
        // Collapse the chunk list into the single merged blob.
        this._recordedChunks = [blob];
    }

    this._size = blob.size;
    this._video.src = url;
    // Restore the playback position on the new source.
    this._video.currentTime = time;
}

您应该使用两个 WebSocket 进行视频广播,两个用于收听。一个 WebSocket 仅传输视频块,第二个仅传输带有视频标头的新 blob(每隔一段时间重新开始录制)。

// Socket for complete blobs (sent when the server restarts a recording;
// each carries a fresh video header).
const blobWebSocket = new WebSocket(`ws://127.0.0.1:${blobPort}/`);
blobWebSocket.onmessage = (event) => {
    console.log({blob:event.data});
    this._videoWorker.pushBlob(event.data);
};

// Socket for the incremental chunk stream.
const chunkWebSocket = new WebSocket(`ws://127.0.0.1:${chunkPort}/`);
chunkWebSocket.onmessage = (event) => {
    console.log({chunk:event.data});
    this._videoWorker.pushChunk(event.data);
};

连接后,服务器向客户端发送所有当前视频 blob,并开始向客户端动态发送新块。

const wss = new WebSocket.Server({ port });
// Backlog of video data sent to newly connected clients.
// Fix: Buffer.alloc is a factory function, not a constructor —
// `new Buffer.alloc(0)` only worked by accident.
let buffer = Buffer.alloc(0);

/**
 * Accumulate incoming media data and relay it to connected clients.
 *
 * A "blob" message marks a restarted recording (it contains a fresh video
 * header), so it replaces the backlog served to new clients. A plain chunk
 * is appended to the backlog and broadcast to current listeners.
 *
 * @param {Buffer} buf - incoming media data
 * @param {boolean} [isBlob=false] - true when `buf` is a full header blob
 */
function chunkHandler(buf, isBlob = false) {

    console.log({ buf, isBlob });

    if (isBlob === true) {
        // New segment: reset the backlog (header blobs are not re-broadcast
        // to already-connected clients, which have the stream in progress).
        buffer = buf;
    } else {
        // Buffer.concat computes the total length itself; the original's
        // explicit (misspelled) `totalLenght` argument was redundant.
        buffer = Buffer.concat([buffer, buf]);
        broadcast(wss, buf);
    }
}

// On connect, send the newcomer the backlog (header + chunks so far)
// so it can start playback mid-stream.
wss.on('connection', (ws) => {
    if (buffer.length !== 0) {
        ws.send(buffer);
    }
});
于 2019-04-04T13:52:39.920 回答