首先让我先说 -我是 Janus / GStreamer / WebRTC 的新手。
我必须使用 GStreamer 和 WebRTC 将连接在机器人硬件上的远程摄像机流式传输到浏览器上。
但作为概念证明,我首先想用 videotestsrc 实现同样的效果。所以,我一直在努力实现以下目标:
- 构建 GStreamer 管道
- 使用 UDPSink 将其发送给 Janus
- 运行 Janus 网关并在浏览器(Chrome 和 Firefox)上显示测试视频流。
这是我到目前为止所做的:
1. 创建以下 GST 管道:
gst-launch-1.0 videotestsrc ! video/x-raw,width=1024,height=768,framerate=30/1 ! timeoverlay ! x264enc ! rtph264pay config-interval=1 pt=96 ! udpsink host=192.168.1.6 port=8004
2. 我正在使用修改过的 streamingtest.html 代码:streamingtest2.js 和 streamingtest2.html:
// Pick the Janus REST endpoint based on how this page itself was served:
// plain-HTTP pages talk to port 8088, HTTPS pages to port 8089.
var server = (window.location.protocol === 'http:')
    ? "http://" + window.location.hostname + ":8088/janus"
    : "https://" + window.location.hostname + ":8089/janus";

var janus = null;          // Janus session handle
var streaming = null;      // streaming-plugin handle
var started = false;       // guards against double-clicking Start
var spinner = null;        // unused placeholder kept from the original demo
var selectedStream = null; // unused placeholder kept from the original demo

$(document).ready(function () {
    // Initialize the library (console debug enabled), then wire up the UI.
    Janus.init({
        debug: true,
        callback: function () {
            startJanus();
        }
    });
});
// Wire the Start button: the first click creates the Janus session and,
// on success, attaches to the streaming plugin.
function startJanus() {
    console.log("starting Janus");

    var onStartClicked = function () {
        // Only the first click does anything.
        if (started) {
            return;
        }
        started = true;
        // Make sure the browser supports WebRTC
        if (!Janus.isWebrtcSupported()) {
            console.error("No webrtc support");
            return;
        }
        // Create session
        janus = new Janus({
            server: server,
            success: function () {
                console.log("Success");
                attachToStreamingPlugin(janus);
            },
            error: function (err) {
                console.log(err);
                console.log("janus error");
            },
            destroyed: function () {
                console.log("destroyed");
            }
        });
    };

    $('#start').click(onStartClicked);
}
// Attach the given Janus session to the streaming plugin and wire up
// every plugin callback to the small handler functions below.
function attachToStreamingPlugin(janus) {
    console.log("Attach to streaming plugin");

    var onAttached = function (handle) {
        streaming = handle;
        console.log("Plugin attached! (" + streaming.getPlugin() + ", id=" + streaming.getId() + ")");
        // Kick off the session by asking for the mountpoint list.
        updateStreamsList();
    };

    var onAttachError = function (error) {
        console.log(" -- Error attaching plugin... " + error);
        console.error("Error attaching plugin... " + error);
    };

    var onMessage = function (msg, jsep) {
        console.log(" ::: Got a message :::");
        console.log(JSON.stringify(msg));
        processMessage(msg);
        handleSDP(jsep);
    };

    var onRemoteStream = function (stream) {
        console.log(" ::: Got a remote stream :::");
        console.log(JSON.stringify(stream));
        handleStream(stream);
    };

    janus.attach({
        plugin: "janus.plugin.streaming",
        success: onAttached,
        error: onAttachError,
        onmessage: onMessage,
        onremotestream: onRemoteStream,
        oncleanup: function () {
            console.log(" ::: Got a cleanup notification :::");
        }
    });
}
// Log the plugin's status updates; on "stopped" also tear the stream down.
function processMessage(msg) {
    var result = msg["result"];
    if (!result || !result["status"]) {
        console.log("no status available");
        return;
    }
    var status = result["status"];
    if (status === 'starting') {
        console.log("starting - please wait...");
    } else if (status === 'preparing') {
        console.log("preparing");
    } else if (status === 'started') {
        console.log("started");
    } else if (status === 'stopped') {
        console.log("stopped");
        stopStream();
    }
}
// Answer the SDP offer Janus sends for the mountpoint (we only receive media).
// NOTE(review): the original author observed this jsep never seems to arrive —
// confirm against the Janus console logs.
function handleSDP(jsep) {
    console.log(" :: jsep :: ");
    console.log(jsep);
    if (jsep === undefined || jsep === null) {
        console.log("no sdp");
        return;
    }
    console.log("Handling SDP as well...");
    console.log(jsep);
    // Build the recvonly answer and ask the plugin to start streaming.
    streaming.createAnswer({
        jsep: jsep,
        media: { audioSend: false, videoSend: false }, // We want recvonly audio/video
        success: function (answerJsep) {
            console.log("Got SDP!");
            console.log(answerJsep);
            streaming.send({ "message": { "request": "start" }, "jsep": answerJsep });
        },
        error: function (error) {
            console.log("WebRTC error:");
            console.log(error);
            console.error("WebRTC error... " + JSON.stringify(error));
        }
    });
}
// Hook the remote MediaStream up to the <video id="remotevideo"> element.
function handleStream(stream) {
    console.log(" ::: Got a remote stream :::");
    console.log(JSON.stringify(stream));
    // Show the stream and hide the spinner when we get a playing event
    console.log("attaching remote media stream");
    var videoElement = $('#remotevideo').get(0);
    Janus.attachMediaStream(videoElement, stream);
    $("#remotevideo").bind("playing", function () {
        console.log("got playing event");
    });
}
/**
 * Ask the streaming plugin for its configured mountpoints and start
 * playing the first one. Logs an error and bails out when the plugin
 * returns no usable list.
 */
function updateStreamsList() {
    var body = { "request": "list" };
    console.log("Sending message (" + JSON.stringify(body) + ")");
    streaming.send({
        "message": body,
        success: function (result) {
            if (result === null || result === undefined) {
                console.error("no streams available");
                return;
            }
            var list = result["list"];
            if (list === undefined || list === null) {
                console.error("no streams available - list is null");
                return;
            }
            // Bug fix: an empty mountpoint list used to fall through to
            // startStream(list[0]) with undefined, which then threw.
            if (list.length === 0) {
                console.error("no streams available - list is empty");
                return;
            }
            console.log("Got a list of available streams:");
            console.log(list);
            console.log("taking the first available stream");
            startStream(list[0]);
        }
    });
}
/**
 * Send a "watch" request for the given mountpoint object.
 * Bug fixes: guard against a null/undefined stream object before
 * dereferencing it, and pass an explicit radix to parseInt.
 */
function startStream(selectedStream) {
    if (selectedStream === undefined || selectedStream === null) {
        console.log("No selected stream");
        return;
    }
    var selectedStreamId = selectedStream["id"];
    console.log("Selected video id #" + selectedStreamId);
    if (selectedStreamId === undefined || selectedStreamId === null) {
        console.log("No selected stream");
        return;
    }
    var body = { "request": "watch", id: parseInt(selectedStreamId, 10) };
    streaming.send({ "message": body });
}
// Ask the plugin to stop the mountpoint, then hang up the PeerConnection.
function stopStream() {
    console.log("stopping stream");
    streaming.send({ "message": { "request": "stop" } });
    streaming.hangup();
}
<!--
// janus-gateway streamingtest refactor so I can understand it better
// GPL v3 as original
// https://github.com/meetecho/janus-gateway
// https://github.com/meetecho/janus-gateway/blob/master/html/streamingtest.js
-->
<!DOCTYPE html>
<html>
<head>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script type="text/javascript"
src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js"></script>
<script type="text/javascript"
src="https://cdnjs.cloudflare.com/ajax/libs/bootbox.js/5.4.0/bootbox.min.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/spin.js/2.3.2/spin.min.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/toastr.js/2.1.4/toastr.min.js"></script>
<script type="text/javascript" src="janus.js"></script>
</head>
<body>
<div>
<button class="btn btn-default" autocomplete="off" id="start">Start</button><br />
<div id="stream">
<!-- Fix: "muted" and "playsinline" added. Chrome/Safari autoplay policies
     can block un-muted autoplay video, which leaves the element stalled
     on a black frame even when RTP media is arriving. -->
<video controls autoplay muted playsinline id="remotevideo" width="320" height="240" style="border: 1px solid;">
</video>
</div>
</div>
<script type="text/javascript" src="streamingtest2.js"></script>
</body>
</html>
3. 我使用以下方式启动 Janus 服务器:
sudo /usr/local/janus/bin/janus
我的 Janus 流配置文件(janus.plugin.streaming.jcfg)看起来像这样(完整文件见 PasteBin 上的 janus.plugin.streaming.jcfg):
rtp-sample: {
type = "rtp"
id = 1
description = "Test Stream - 1"
metadata = "You can use this metadata section to put any info you want!"
# Fix: the GStreamer pipeline only sends video RTP to port 8004; with
# audio = true Janus advertises an audio m-line that never receives media.
# Disable audio until an audio pipeline actually feeds port 8005.
audio = false
#audioport = 8005
#audiopt = 10
#audiortpmap = "opus/48000/2"
video = true
videoport = 8004
videopt = 96
videortpmap = "H264/90000"
# Fix: Chrome requires an H.264 profile it supports in the SDP; baseline
# 42e01f with packetization-mode=1 matches typical x264enc/rtph264pay output
# and is the usual cure for a stream that renders black and then dies.
videofmtp = "profile-level-id=42e01f;packetization-mode=1"
}
4. 在 8080 端口启动一个本地 http 服务器,打开 streamingtest2.html(我当前本地 IP 是 192.168.1.6):
192.168.1.6:8080/streamingtest2.html
5.这将启动包含标签和开始按钮的测试页面。当我单击“开始”按钮时,它会连接到端口 8088 上的 Janus API 并等待视频流。
6. 在浏览器上,视频标签被一个 5 秒的“黑色”流填充,然后停止。
请告知我做错了什么(很可能是它在管道中或 Janus 配置中的一些证书问题)。
或者请告知是否可以使用 GStreamer 的 WebRTCBin 以更简单的方式实现这一点?
当上述所有步骤发生时,请查看此 Pastebin https://pastebin.com/KeHAWjXx以获取我的 Google Chrome 控制台日志。请提供一些输入,以便我可以使用 GStreamer 和 Janus 流式传输视频。我也不知道 WebRTCBin 是如何在这一切中使用的。