4

假设我有一个允许用户选择宽度、高度、颜色和持续时间的页面。然后我希望页面生成具有这些尺寸和持续时间的视频,该视频只是该颜色的静态帧,然后将在该页面上的 <video> 标记中播放。我可以使用 HTML5 MediaSource API 来执行此操作吗?

例如:如果用户选择 704x396 + rgb(0, 0, 0) + 00:00:15,那么我想生成一个 <video> 标签来播放大小为 704x396、15 秒长且全黑的视频。如果您熟悉Aegisub 的虚拟视频功能,那基本上就是我想要模仿的。

据我所知,MediaSource 只允许我在浏览器支持的实际视频编解码器中提供原始视频字节。它没有任何“更高级别”的 API,例如让我提供单个帧的东西。

如果不是 MediaSource,还有其他方法可以做到这一点(当然除了捆绑编译为 JS 的 webm / MP4 编码器)?

4

2 回答 2

8

使用 canvas.captureStream() 和 MediaRecorder

HTMLCanvasElement-captureStream

MediaRecorder

目前在 Firefox 中工作。也许很快有一天会在 Chrome 中工作。

于 2015-12-29T09:50:58.390 回答
1

根据罗伯特的回答,我有一个使用三件事的解决方案:

  1. canvas.captureStream(), 以获取画布内容的流。
  2. new MediaRecorder(stream), 将该流转换为视频元素可以使用的视频格式。
  3. MediaSource

sideshowbarker 的回答(现已删除)跳过 MediaRecorder 并直接将流分配为视频 src。这种做法确实可行,但不适合我的目的,因为视频具有动态的、不断增加的持续时间,并且无法像普通视频那样进行进度跳转(seek)。这就是为什么我确实需要一个 MediaRecorder 来录制一段视频,然后用 MSE 一遍又一遍地重复它以达到所需的长度。

下面是我想要制作的端到端示例。正如罗伯特所提到的,它目前可能只适用于 FF。截至 2021-02,它在 FF 84 中有效,在 Chromium 87 中无效。

// @ts-check

"use strict";

// Hand-rolled minimal typings for @ts-check: presumably the bundled TS DOM
// lib lacked BlobEvent/MediaRecorder declarations when this was written —
// TODO confirm against current lib.dom.d.ts and delete if redundant.
/**
 * @typedef {{
 *     data: Blob,
 * }} BlobEvent
 *
 * @typedef {{
 *     state: "inactive" | "paused" | "recording";
 *     addEventListener(type: "dataavailable", listener: (event: BlobEvent) => void): void;
 *     pause(): void;
 *     resume(): void;
 *     start(timeslice: number): void;
 *     stop(): void;
 * }} MediaRecorder
 */

addEventListener("DOMContentLoaded", () => {
    /** @type {HTMLVideoElement} */
    const video = document.querySelector("#video");

    document.querySelector("#go").addEventListener("click", async () => {
        /** @type {HTMLInputElement} */
        const colorInput = document.querySelector("#video-color");

        /** @type {HTMLSelectElement} */
        const resolutionInput = document.querySelector("#video-resolution");
        const [width, height] = resolutionInput.value.split("x");

        /** @type {HTMLInputElement} */
        const durationInput = document.querySelector("#video-duration");

        // The duration input is in minutes; makeVideo expects seconds.
        // Always pass a radix to parseInt so "0x"-like input can't surprise us.
        await makeVideo(
            colorInput.value,
            [Number.parseInt(width, 10), Number.parseInt(height, 10)],
            Number.parseInt(durationInput.value, 10) * 60,
            video,
        );

        // Await play() so autoplay-policy rejections surface instead of
        // vanishing as an unhandled floating promise.
        await video.play();
    }, false);
}, false);

/**
 * Creates a video of the given color, dimensions and duration, and prepares the given video element to play it.
 *
 * Strategy: paint a single-color canvas, record a small chunk of it with
 * MediaRecorder, then append that chunk to a MediaSource SourceBuffer over
 * and over until the buffered range reaches `duration`.
 *
 * @param {string} color - Any canvas fillStyle color string.
 * @param {[number, number]} dimensions - [width, height] in pixels.
 * @param {number} duration - Target video length in seconds.
 * @param {HTMLVideoElement} video - Element that will play the generated video.
 */
async function makeVideo(color, [width, height], duration, video) {
    video.width = width;
    video.height = height;

    // Off-screen canvas that supplies the frames; never attached to the DOM.
    const canvas = document.createElement("canvas");
    canvas.width = width;
    canvas.height = height;

    /** @type {CanvasRenderingContext2D} */
    const context = canvas.getContext("2d");
    context.fillStyle = color;
    context.fillRect(0, 0, width, height);

    /** @type {MediaStream} */
    const stream = canvas.captureStream(30);
    /** @type {MediaRecorder} */
    const recorder = new MediaRecorder(stream);

    recorder.start(1); // Get as many events as possible to have a chance at getting the smallest possible chunk.

    // Keep "painting" every animation frame while the recorder is active.
    requestAnimationFrame(function drawCanvas() {
        if (recorder.state === "inactive") {
            // recorder has stopped. No need to draw any more.
            return;
        }

        requestAnimationFrame(drawCanvas);

        // Frames aren't generated if the canvas isn't painted. The zero-area
        // fillRect counts as a paint without altering any pixels.
        //
        // Ref: https://bugzilla.mozilla.org/show_bug.cgi?id=1277476
        context.fillRect(0, 0, 0, 0);
    });

    // Accumulate recorder output until the combined blob contains at least
    // one decodable frame (tested by actually appending it to a SourceBuffer),
    // then resolve with the working MediaSource/SourceBuffer/bytes triple.
    /** @type {Promise<[MediaSource, SourceBuffer, ArrayBuffer]>} */
    const p = new Promise(resolve => {
        /** @type {Blob | null} */
        let blob = null;

        recorder.addEventListener("dataavailable", async event => {
            if (recorder.state === "inactive") {
                // Being called after recorder.stop(). Do nothing.
                return;
            }

            if (event.data.size === 0) {
                console.warn("No new data.");
                return;
            }

            recorder.pause(); // Don't get flooded with new blobs while parsing the current blob.

            // Concatenate this chunk onto whatever we have so far.
            if (blob === null) {
                blob = event.data;
            }
            else {
                blob = new Blob([blob, event.data], { type: blob.type });
            }

            // Data is available but may not contain any frames. Test for that.
            try {
                const [[mediaSource, sourceBuffer], buffer] = await Promise.all([newMediaSourceAndBuffer(video, blob.type), blobToArrayBuffer(blob)]);
                await appendBuffer(sourceBuffer, buffer);
                console.log(`Got enough data for ${ getEndTime(sourceBuffer) } seconds.`);

                // Success: the chunk decodes. Stop recording; the rAF loop
                // above notices the "inactive" state and stops too.
                resolve([mediaSource, sourceBuffer, buffer]);
                recorder.stop();
            }
            catch (ex) {
                // Chunk too small to decode — resume and wait for more bytes.
                console.warn(ex);
                console.warn("Waiting for more data...");

                recorder.resume();
            }
        });
    });

    const [mediaSource, sourceBuffer, buffer] = await p;
    // Loop the recorded chunk until the buffered range covers `duration`.
    await appendBufferUntil(sourceBuffer, buffer, duration);
    mediaSource.endOfStream();
}

/**
 * Sets up the given `video` to use a new MediaSource, and appends a new SourceBuffer of the given `type`.
 *
 * @param {HTMLVideoElement} video
 * @param {string} type - MIME type for the SourceBuffer (e.g. the recorder blob's type).
 * @returns {Promise<[MediaSource, SourceBuffer]>} Rejects if `type` is unsupported.
 */
function newMediaSourceAndBuffer(video, type) {
    return new Promise((resolve, reject) => {
        const mediaSource = new MediaSource();

        // addSourceBuffer is only legal once the source has opened, which
        // happens after the object URL is attached to the video element.
        const handleSourceOpen = () => {
            mediaSource.removeEventListener("sourceopen", handleSourceOpen, false);

            try {
                resolve([mediaSource, mediaSource.addSourceBuffer(type)]);
            }
            catch (ex) {
                reject(ex);
            }
        };

        mediaSource.addEventListener("sourceopen", handleSourceOpen, false);

        video.src = URL.createObjectURL(mediaSource);
    });
}

/**
 * Converts a Blob to an ArrayBuffer.
 *
 * @param {Blob} blob
 * @returns {Promise<ArrayBuffer>}
 */
function blobToArrayBuffer(blob) {
    // Modern engines expose this directly (Blob.prototype.arrayBuffer);
    // prefer it over the FileReader dance.
    if (typeof blob.arrayBuffer === "function") {
        return blob.arrayBuffer();
    }

    // Fallback for older engines without Blob.prototype.arrayBuffer.
    return new Promise((resolve, reject) => {
        const fileReader = new FileReader();

        fileReader.addEventListener("load", () => {
            resolve(/** @type {ArrayBuffer} */ (fileReader.result));
        }, false);
        fileReader.addEventListener("error", event => {
            reject(event);
        }, false);

        fileReader.readAsArrayBuffer(blob);
    });
}

/**
 * Appends the given video data `buffer` to the given `sourceBuffer`.
 *
 * @param {SourceBuffer} sourceBuffer
 * @param {ArrayBuffer} buffer
 * @returns {Promise<void>} Rejects when the append produced no frames or did not extend the buffered range.
 */
function appendBuffer(sourceBuffer, buffer) {
    return new Promise((resolve, reject) => {
        // Remember where the buffered range ended so we can both offset the
        // new data and detect whether the append actually grew the range.
        const previousEndTime = getEndTime(sourceBuffer);

        const handleUpdateEnd = () => {
            sourceBuffer.removeEventListener("updateend", handleUpdateEnd, false);

            if (sourceBuffer.buffered.length === 0) {
                reject(new Error(`buffer of length ${ buffer.byteLength } could not be appended to sourceBuffer. It's probably too small and doesn't contain any frames.`));
                return;
            }

            if (getEndTime(sourceBuffer) === previousEndTime) {
                reject(new Error("sourceBuffer is not increasing in size. Perhaps buffer is too small?"));
                return;
            }

            resolve();
        };

        sourceBuffer.addEventListener("updateend", handleUpdateEnd, false);

        // Shift the appended data so it starts right where the buffer ends.
        sourceBuffer.timestampOffset = previousEndTime;
        sourceBuffer.appendBuffer(buffer);
    });
}

/**
 * Repeatedly appends the given video data `buffer` to the given `sourceBuffer` until it is of `duration` length.
 *
 * @param {SourceBuffer} sourceBuffer
 * @param {ArrayBuffer} buffer
 * @param {number} duration - Target end time in seconds.
 * @returns {Promise<void>}
 */
async function appendBufferUntil(sourceBuffer, buffer, duration) {
    // Appends are sequential on purpose: each one must finish (updateend)
    // before the next timestampOffset can be computed.
    while (getEndTime(sourceBuffer) < duration) {
        await appendBuffer(sourceBuffer, buffer);
    }
}

/**
 * Gets the end time of a SourceBuffer.
 *
 * @param {SourceBuffer} sourceBuffer
 * @returns {number} End of the first buffered range, or 0 when nothing is buffered.
 */
function getEndTime(sourceBuffer) {
    const { buffered } = sourceBuffer;

    if (buffered.length === 0) {
        return 0;
    }

    return buffered.end(0);
}
<div>
    <label>Color: <input type="color" id="video-color" value="#2fa3fe"></label>
    <label>Resolution: 
        <select id="video-resolution">
            <option value="320x240" selected="selected">320 x 240</option>
            <option value="640x480">640 x 480</option>
            <option value="1280x720">1280 x 720</option>
        </select>
    </label>
    <label>Duration: <input type="number" id="video-duration" value="25"> mins</label>
    <button type="button" id="go">Go</button>
</div>
<video id="video" controls="controls" loop="loop"></video>

于 2015-12-30T09:12:27.090 回答