1

我希望找到如何使用 pygst 创建 MPEG-4 视频文件(可以建议其他框架)。

问题分为三个部分

  • 如何从 Python 帧缓冲区在 pygst 管道中提供生成的视频数据

  • 如何将此流保存到 MPEG-4 文件

  • 如何将此流与 MP3 音频源混合

伪代码如下:

for frame in range(0, 10000): # let's render 10000 frames of video
        # Pre-allocate the pixel buffer: assigning data[i] into an empty
        # list would raise IndexError on the very first pixel.
        data = [0] * (height * width)
        for y in range(0, height):
                for x in range(0, width):
                        data[y*width+x] = random.randint(0, 2**31) # rgba pixel
        # XXX: how to feed the video frame generated above into a GStreamer
        # pipeline and save it to an MPEG-4 file
        pass

更多信息:

http://lists.freedesktop.org/archives/gstreamer-devel/2011-August/032609.html

4

1 回答 1

2

您可能想要创建一个appsrc元素,然后为每个帧创建一个新的GstBuffer并使用gst_app_src_push_buffer()将其推送到管道中。

这是一些示例代码,它不起作用 - ffmpeg 抱怨帧长度(?),但我想你明白了,还有一些提示。

import os

# Must be set before gst is imported so gst.DEBUG_BIN_TO_DOT_FILE knows
# where to write pipeline graph dumps (read by feed_appsrc below).
os.putenv('GST_DEBUG_DUMP_DOT_DIR', '/tmp')

import gst
import gobject
# Required so GStreamer's streaming threads can dispatch into Python.
gobject.threads_init()
import logging
import random
import pdb

# Module-level logger; DEBUG so push-buffer results and bus messages show up.
_log = logging.getLogger(__name__)
_log.setLevel(logging.DEBUG)
logging.basicConfig()

def framegenerator(height=1080, width=1920, frames=10000):
    '''
    Yield one frame of raw video per iteration.

    Each frame is a flat, row-major list of ``height * width`` integers,
    one packed 32-bit RGBA pixel per entry, filled with random values.

    :param height: frame height in pixels (default 1080, as before)
    :param width: frame width in pixels (default 1920, as before)
    :param frames: number of frames to yield (default 10000, as before)
    :returns: generator of ``list`` pixel buffers
    '''
    for _ in range(frames):
        # Pre-allocate the buffer once per frame; every entry is
        # overwritten below, so [0]*n is equivalent to (and cheaper
        # than) materializing list(range(n)) just to clobber it.
        data = [0] * (height * width)
        for y in range(height):
            for x in range(width):
                data[y * width + x] = random.randint(0, 2**31)  # rgba pixel
        yield data

# Single module-level frame source, consumed by the appsrc feed callback.
GENERATOR = framegenerator()

def feed_appsrc(bus, message):
    '''
    Feed the appsrc element with a new frame.

    Connected to appsrc's "need-data" signal, so GStreamer invokes it
    whenever the pipeline can accept more data.  Pulls the next frame
    from the module-level GENERATOR, wraps it in a gst.Buffer and pushes
    it with the "push-buffer" action signal; emits "eos" once the
    generator is exhausted.

    NOTE(review): the "need-data" signal delivers (appsrc, length)
    arguments rather than (bus, message) -- confirm the parameter names
    against the appsrc documentation.
    '''
    global appsrc, pipeline
    # Dump the current pipeline graph (GST_DEBUG_DUMP_DOT_DIR was set to
    # /tmp before importing gst) and render it to PNG for debugging.
    dotfile = "/tmp/debug-graph.dot"
    pngfile = "/tmp/pipeline.png"
    if os.access(dotfile, os.F_OK):
        os.remove(dotfile)
    if os.access(pngfile, os.F_OK):
        os.remove(pngfile)

    gst.DEBUG_BIN_TO_DOT_FILE(
        pipeline,
        gst.DEBUG_GRAPH_SHOW_ALL,
        'debug-graph')

    # Shell out to Graphviz to turn the dot dump into an image.
    dot = '/usr/bin/dot'
    os.system(dot + " -Tpng -o " + pngfile + " " + dotfile)

    try:
        # .next() is Python 2 only; Python 3 would use next(GENERATOR).
        frame = GENERATOR.next()
        # NOTE(review): str() of a list of ints yields its textual repr
        # ("[123, 456, ...]"), not raw packed pixel bytes -- this is the
        # likely cause of the "frame length" complaint mentioned in the
        # answer; the frame should be packed to bytes (e.g. with struct
        # or array) instead.
        frame = str(frame)

        buf = gst.Buffer(
            frame)

        buf.set_caps(
            gst.caps_from_string('video/x-raw-rgb,framerate=30/1'))

        #_log.debug(buf)
        # "push-buffer" hands the buffer to appsrc; res is a GstFlowReturn.
        res = appsrc.emit('push-buffer', buf)
        _log.debug('Result: {0}'.format(res))
    except StopIteration:
        # Generator exhausted: signal end-of-stream downstream.
        res = appsrc.emit('eos')
        _log.info('EOS')

def _on_message(bus, message):
    '''Log every GStreamer bus message at DEBUG level.'''
    _log.log(logging.DEBUG, message)

# Build the pipeline:
#   appsrc -> ffmpegcolorspace -> videorate -> videoscale -> vp8enc
#          -> webmmux -> filesink
pipeline = gst.Pipeline('pipeline')

# appsrc is the injection point for the Python-generated frames.
appsrc = gst.element_factory_make('appsrc', 'appsrc')
# Connect feed_appsrc to the need-data signal
appsrc.connect('need-data', feed_appsrc)
# Describe the raw RGBA frames we will push.  NOTE(review): the masks
# here are decimal renderings of the intended hex values; the inline
# '#{0}' comments show red/blue masks disagreeing with the values --
# verify against the pixel layout actually produced by framegenerator.
appsrc.set_property('caps', 
    gst.caps_from_string(','.join([
                'video/x-raw-rgb',
                'framerate=30/1',
                'width=1920',
                'height=1080',
                'bpp=32',
                'depth=32',
                'green_mask=65280', #{0}'.format(0x00ff0000),
                'red_mask=255', #{0}'.format(0x000000ff),
                'blue_mask=16711680', #{0}'.format(0x000000ff),
                'alpha_mask=-16777216', #{0}'.format(),
                'endianness=4321'])))
pipeline.add(appsrc)

# Converts the raw RGB frames to whatever the encoder needs.
ffmpegcolorspace = gst.element_factory_make('ffmpegcolorspace')
pipeline.add(ffmpegcolorspace)

# Enforces a constant framerate by duplicating/dropping frames.
videorate = gst.element_factory_make('videorate')
pipeline.add(videorate)

ffvideoscale = gst.element_factory_make('videoscale')
pipeline.add(ffvideoscale)

# VP8 encoder + WebM muxer: this example writes WebM, not the MPEG-4
# asked about -- swapping in an MPEG-4 encoder/muxer would be analogous.
vp8enc = gst.element_factory_make('vp8enc', 'vp8enc')
pipeline.add(vp8enc)

webmmux = gst.element_factory_make('webmmux', 'webmmux')
pipeline.add(webmmux)

filesink = gst.element_factory_make('filesink', 'filesink')
filesink.set_property('location', '/tmp/generated-video.webm')
pipeline.add(filesink)

# Link all elements in pad order; fails if any caps are incompatible.
gst.element_link_many(
    appsrc,
    ffmpegcolorspace,
    videorate,
    ffvideoscale,
    vp8enc,
    webmmux,
    filesink)

# Watch the pipeline bus so errors/EOS are at least logged.
bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect('message', _on_message)

pipeline.set_state(gst.STATE_PLAYING)

# NOTE(review): drops into the debugger before the main loop starts;
# remove this to let the pipeline run unattended.
pdb.set_trace()

# GLib main loop: required so signals (need-data, bus messages) fire.
gobject.MainLoop().run()

要点:https://gist.github.com/ce60c620e7ef3dbd0779

于 2011-11-17T09:56:51.920 回答