1

我正在尝试将音频从 Raspberry Pi 流式传输到 VM。

Raspberry Pi 插入了一个麦克风,它的管道是这样的(IP/主机名信息已编辑):

gst-launch-1.0 -ev alsasrc device=plughw:1,0 ! audioconvert ! rtpL24pay ! udpsink host=xxxxx port=xxxx

VM 正在运行此管道:

gst-launch-1.0 -ev udpsrc port=xxxx caps="application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L24, encoding-params=(string)2, channels=(int)2, payload=(int)96, ssrc=(uint)636287891, timestamp-offset=(uint)692362821, seqnum-offset=(uint)11479" ! rtpL24depay ! decodebin ! audioconvert ! wavenc ! filesink location=test.wav

当我用 Ctrl+C (加上 -e 开关)结束它时,通过命令行运行它就很好,并且文件是可读的。然而,我想做的是通过命令行保持管道在 Raspberry pi 上运行,但使用 Java 应用程序作为 VM 的管道。此 Java 应用程序连接到 REST 端点“/start”和“/stop”。“/start”启动管道,“/stop”应该停止管道并写入文件,但是当点击“/stop”端点时,文件大小为零并且不可读。我的代码在这篇文章的底部。任何关于改进管道或如何使文件可读的想法都会很棒。我最初的想法是这与我如何发送 EOS 消息有关,但不太确定。谢谢!

编辑:还忘了提到我在 Docker 容器中运行这个 JAR 文件,所以它可能与端口有关,但同样 - 不确定。

package service.rest.controllers;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.freedesktop.gstreamer.*;
import org.freedesktop.gstreamer.event.EOSEvent;
import org.freedesktop.gstreamer.message.EOSMessage;
import org.freedesktop.gstreamer.message.Message;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import service.config.ClientSettings;
import service.config.FileSettings;
import service.postgres.Database;
import service.postgres.ExecuteDatabase;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

//pipeline running on pi@raspberrypi:
//gst-launch-1.0 -ev alsasrc device=plughw:1,0 ! audioconvert ! rtpL24pay ! udpsink host=xxxxx port=xxxx
@RestController
public class AudioCaptureController {

    @Autowired
    public Database database;

    @Autowired
    ExecuteDatabase db_executor;

    @Autowired
    ClientSettings clientSettings;

    @Autowired
    FileSettings fileSettings;

    private static final Logger LOGGER = LogManager.getLogger(AudioCaptureController.class.getName());
    private static final String startTemplate = "Pipeline started at %s.";
    private static final String stopTemplate = "File recorded for time window %s to %s.";
    private static final SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    private Pipeline pipe;
    private Date startTime;
    private int port;
    private int defaultLength;
    private int defaultRecordingDuration;
    private String defaultDirectory;

    public AudioCaptureController() {
    }

    /**
     * Initializes GStreamer pipeline.
     * udpsrc ! rtpL24depay ! decodebin ! audioconvert ! wavenc ! filesink
     */
    public void init() {
        port = clientSettings.getUdp_port();
        defaultLength = fileSettings.getDefault_length();
        defaultRecordingDuration = fileSettings.getDefault_recording_duration();
        defaultDirectory = fileSettings.getDefault_directory();

        Gst.init("Receiver");

        //CREATE ELEMENTS
        Element source = ElementFactory.make("udpsrc", "source");
        Element depayloader = ElementFactory.make("rtpL24depay", "depayloader");
        Element decoder = ElementFactory.make("decodebin", "decoder");
        Element converter = ElementFactory.make("audioconvert", "converter");
        Element encoder = ElementFactory.make("wavenc", "encoder");
        Element sink = ElementFactory.make("filesink", "sink");

        //CONFIGURE ELEMENTS
        Caps caps = Caps.fromString("application/x-rtp, " +
                "media=(string)audio, " +
                "clock-rate=(int)44100, " +
                "encoding-name=(string)L24, " +
                "encoding-params=(string)2, " +
                "channels=(int)2, " +
                "payload=(int)96, " +
                "ssrc=(uint)636287891, " +
                "timestamp-offset=(uint)692362821, " +
                "seqnum-offset=(uint)11479");
        source.set("port", port);
        source.setCaps(caps);

        //GENERATE WAV FILE - **Currently generating only one file**
        //todo: need a way to save specific file names. probably have to pause and restart the stream each time.
        //consider splitting the file post-processing
        //can't use multifilesink or splitmuxsink b/c no native support for wav
        //https://stackoverflow.com/questions/25662392/gstreamer-multifilesink-wav-files-splitting
        sink.set("location", defaultDirectory + "test.wav");
//        sink.set("location", "test.wav");

        //SET UP PIPELINE
        pipe = new Pipeline();
        pipe.addMany(source, depayloader, decoder, converter, encoder, sink);

        //LINK PADS
        source.link(depayloader);
        depayloader.link(decoder);
        decoder.link(converter);
        converter.link(encoder);
        encoder.link(sink);

        //HANDLE EOS/ERROR/WARNING ON THE BUS
        Bus bus = pipe.getBus();
        bus.connect((Bus.EOS) gstObject -> System.out.println("EOS " + gstObject));
        bus.connect((Bus.ERROR) (gstObject, i, s) -> System.out.println("ERROR " + i + " " + s + " " + gstObject));
        bus.connect((Bus.WARNING) (gstObject, i, s) -> System.out.println("WARN " + i + " " + s + " " + gstObject));
        bus.connect((Bus.EOS) obj -> {
            pipe.stop();
            Gst.deinit();
            Gst.quit();
        });
    }

    /**
     * Starts the GStreamer pipeline.
     */
    @RequestMapping("/start")
    public String startRecording() {
        //START PIPELINE
        pipe.play();

        startTime = new Date(System.currentTimeMillis());

        LOGGER.info(String.format(startTemplate, ft.format(startTime)));
        return String.format(startTemplate, ft.format(startTime));
    }

    /**
     * Stops the GStreamer pipeline and pushes the file to database.
     */
    @RequestMapping("/stop")
    public String stopRecording() {
//        if (pipe.isPlaying()) { //might have to comment this out
//            pipe.stop();
            pipe.getBus().post(new EOSMessage(pipe.getS));

//            Gst.quit();

            Date endTime = new Date(System.currentTimeMillis());
            String filePath = defaultDirectory + "test.wav";
            db_executor.insertRecord(database.getConnection(), ft.format(startTime), ft.format(endTime), filePath);

            LOGGER.info(String.format(stopTemplate, ft.format(startTime), ft.format(endTime)));
            return String.format(stopTemplate, ft.format(startTime), ft.format(endTime));
//        } else {
//            LOGGER.info("Pipeline is already at state " + pipe.getState());
//            return "Pipeline is already at state " + pipe.getState();
//        }
    }

}
4

1 回答 1

0

decodebin 有动态源 pads,你需要在它们出现时链接它们(它们在流开始之前不存在,因为 decodebin 无法知道它要处理什么以及它需要多少个 pads)。

https://gstreamer.freedesktop.org/documentation/application-development/basics/pads.html?gi-language=c

对于您的情况,您可能不需要它,因为您不需要解码,只需要 rtp depayloader。如果您想保留它,请确保注册一个pad-added回调,并且您将在它创建后得到它。在回调中,您应该将其链接到管道的其余部分(如果您愿意,您甚至可以在此时创建其余部分)。

于 2019-06-12T21:34:22.197 回答