I have a canvas stream obtained with canvas.captureStream(). I also have a second video stream from a WebRTC video call. Now I want to mix the canvas stream with the audio track of that video stream. How do I do this?
Use the MediaStream constructor, available in Firefox and in Chrome 56, to combine the tracks into a new stream:
let stream = new MediaStream([videoTrack, audioTrack]);
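Applied to your setup, a minimal sketch might look like the following (the names canvas and remoteStream are assumptions standing in for your own canvas element and the stream received from the WebRTC call):

const canvasStream = canvas.captureStream(30);   // 30 fps is an arbitrary choice
const mixed = new MediaStream([
  canvasStream.getVideoTracks()[0],              // video from the canvas
  remoteStream.getAudioTracks()[0]               // audio from the WebRTC call
]);
// `mixed` now carries the canvas video plus the call audio and can be
// recorded or attached to a video element / peer connection.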
The following works for me in Firefox (use an https fiddle in Chrome, though it errors out when recording):
// Grab a microphone audio track, combine it with the canvas video track,
// record the combined stream for 5 seconds, then offer the result for playback/download.
navigator.mediaDevices.getUserMedia({audio: true})
  .then(stream => record(new MediaStream([stream.getTracks()[0],
                                           whiteNoise().getTracks()[0]]), 5000)
    .then(recording => {
      stop(stream);
      video.src = link.href = URL.createObjectURL(new Blob(recording));
      link.download = "recording.webm";
      link.innerHTML = "Download recording";
      log("Playing " + recording[0].type + " recording:");
    })
    .catch(log))
  .catch(log);

// Stand-in for a real canvas animation: draws white noise and captures it at 60 fps.
var whiteNoise = () => {
  let ctx = canvas.getContext('2d');
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  let p = ctx.getImageData(0, 0, canvas.width, canvas.height);
  requestAnimationFrame(function draw() {
    for (var i = 0; i < p.data.length; i++) {
      p.data[i++] = p.data[i++] = p.data[i++] = Math.random() * 255;
    }
    ctx.putImageData(p, 0, 0);
    requestAnimationFrame(draw);
  });
  return canvas.captureStream(60);
};

// Records the given stream for `ms` milliseconds and resolves with the recorded chunks.
var record = (stream, ms) => {
  var rec = new MediaRecorder(stream), data = [];
  rec.ondataavailable = e => data.push(e.data);
  rec.start();
  log(rec.state + " for " + (ms / 1000) + " seconds...");
  var stopped = new Promise((y, n) =>
    (rec.onstop = y, rec.onerror = e => n(e.error || e.name)));
  return Promise.all([stopped, wait(ms).then(_ => rec.stop())]).then(_ => data);
};

var stop = stream => stream.getTracks().forEach(track => track.stop());
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
var log = msg => div.innerHTML += "<br>" + msg;
<div id="div"></div><br>
<canvas id="canvas" width="160" height="120" hidden></canvas>
<video id="video" width="160" height="120" autoplay></video>
<a id="link"></a>
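In your own code you would feed the combined stream from earlier into the same record helper instead of the demo's microphone-plus-white-noise mix; a sketch, assuming the mixed variable from the earlier snippet:

record(mixed, 5000)
  .then(recording => {
    // Same playback/download handling as in the demo above.
    video.src = link.href = URL.createObjectURL(new Blob(recording));
    link.download = "recording.webm";
  })
  .catch(log);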
Answered 2016-10-13T04:30:16.643