
https://github.com/BelledonneCommunications/mediastreamer2 https://github.com/BelledonneCommunications/linphone-android

Using only the mediastreamer2 library, I can start an audio call for a given remote IP and port by calling the corresponding methods in audiostream.c.
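
For reference, the audio-only case boils down to a sketch like the one below; factory, profile, the sound cards, and the payload index 114 are assumptions mirroring the full method further down, and error handling is reduced to a NULL return.

#include <mediastreamer2/mediastream.h>

/* Minimal audio-only sketch (assumed setup: MSFactory, RtpProfile and sound
   cards created elsewhere). RTCP uses local_port + 1 / remote_port + 1. */
static AudioStream *start_audio_only(MSFactory *factory, RtpProfile *profile,
                                     const char *remote_ip, int remote_port,
                                     int local_port,
                                     MSSndCard *playcard, MSSndCard *captcard)
{
    AudioStream *as = audio_stream_new(factory, local_port, local_port + 1, FALSE);
    if (audio_stream_start_full(as, profile,
                                remote_ip, remote_port,        /* remote RTP   */
                                remote_ip, remote_port + 1,    /* remote RTCP  */
                                114, 50,                       /* payload type, jitter (ms) */
                                NULL, NULL,                    /* no input/output file */
                                playcard, captcard,
                                TRUE /* echo cancellation */) != 0) {
        audio_stream_stop(as);
        return NULL;
    }
    return as;
}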

I also need to start a video call, so I initialize videostream.c, call its respective methods, and give it surfaces on which to render the remote and local camera feeds. I am able to start the video stream successfully with the remote IP and port.
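
The video-only case is similar. In the sketch below, the native window ids are assumed to have been obtained from the Android surfaces already, and cam is the selected MSWebCam (again mirroring the full method further down):

/* Minimal video-only sketch (payload index 101 and the window ids are
   assumptions matching the code below). */
static VideoStream *start_video_only(MSFactory *factory, RtpProfile *profile,
                                     const char *remote_ip, int remote_port,
                                     int local_port,
                                     void *remote_window_id, void *preview_window_id,
                                     MSWebCam *cam)
{
    VideoStream *vs = video_stream_new(factory, local_port, local_port + 1, FALSE);
    video_stream_set_native_window_id(vs, remote_window_id);          /* remote feed surface  */
    video_stream_set_native_preview_window_id(vs, preview_window_id); /* local camera preview */
    video_stream_use_preview_video_window(vs, TRUE);
    if (video_stream_start(vs, profile,
                           remote_ip, remote_port,
                           remote_ip, remote_port + 1,
                           101, 60, cam) != 0) {
        video_stream_stop(vs);
        return NULL;
    }
    return vs;
}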

The problem appears when I start both streams at the same time: the sound stops, the remote video stream stops as well, and only the local camera preview keeps working.

I have a single method that does all of this. If I comment out its video part, the audio call works fine; if I comment out the audio part, the video call works fine. But when I start both together there is no sound and no remote video, even though the logs still report that the AudioStream and the VideoStream started successfully.

Can someone with linphone experience help identify the correct order of calls, or point out what we are doing wrong? Here is our method.

JNIEXPORT jint JNICALL  Java_com_myapp_services_LinPhoneMSEngine_LinPhoneMSVE_1AudioStreamStartFull
(JNIEnv *env, jclass self, jstring remote_ip, jint remote_port, jint localport, jint payloadindex, jboolean isLowEnd)
{
int bRetVal = 0;
MSVideoSize size = {320, 240};
char rtcp_tool[128]={0};
int ret;
//jboolean copy;
char cname[128]={0};
const char *cremote_ip;

ortp_warning("Audio Stream Start Full");
LOGD("Audio Stream Start Full");

cremote_ip = (*env)->GetStringUTFChars(env, remote_ip, NULL);
ortp_warning("Cremote_ip= %s", cremote_ip);
LOGD("Cremote_ip= %s", cremote_ip);

//  ms_filter_enable_statistics(TRUE);

veData->queue = ortp_ev_queue_new();
veData->soundCard = NULL;

set_playback_device();


ortp_warning("sound: playback_dev_id: %s", ms_snd_card_get_string_id(veData->soundCard));
LOGD("sound: playback_dev_id: %s", ms_snd_card_get_string_id(veData->soundCard));

veData->CaptureCard = NULL;
set_capture_device();

ortp_warning("sound: capture_dev_id: %s", ms_snd_card_get_string_id(veData->CaptureCard));
LOGD("sound: capture_dev_id: %s", ms_snd_card_get_string_id(veData->CaptureCard));


veData->audioStream = audio_stream_new(msFactory, localport, localport + 1, false);

audio_stream_enable_adaptive_bitrate_control(veData->audioStream, true);
audio_stream_enable_adaptive_jittcomp(veData->audioStream, true);
rtp_session_set_jitter_compensation(veData->audioStream->ms.sessions.rtp_session, 50);

rtp_session_enable_rtcp_mux(veData->audioStream->ms.sessions.rtp_session, true);
ret = AUDIO_STREAM_FEATURE_VOL_SND | AUDIO_STREAM_FEATURE_VOL_RCV;

if (!isLowEnd)
{
    ret = ret | AUDIO_STREAM_FEATURE_EC | AUDIO_STREAM_FEATURE_EQUALIZER | AUDIO_STREAM_FEATURE_DTMF | AUDIO_STREAM_FEATURE_DTMF_ECHO;
    audio_stream_set_features(veData->audioStream, ret);

    ortp_warning("Setting Echo Canceller params");
    LOGD("Setting Echo Canceller params");

    rtp_session_enable_jitter_buffer(veData->audioStream->ms.sessions.rtp_session, TRUE);

    audio_stream_set_echo_canceller_params(veData->audioStream, 60, 0, 128);

    audio_stream_enable_gain_control(veData->audioStream, true);
    audio_stream_enable_automatic_gain_control(veData->audioStream, true);

}
else
{
    audio_stream_set_features(veData->audioStream, ret);

    ortp_warning("No Echo Canceller params!!");
    LOGD("No Echo Canceller params!!");
    rtp_session_enable_jitter_buffer(veData->audioStream->ms.sessions.rtp_session, FALSE);

}

if( veData->audioStream == NULL){
    ortp_warning("AudioStream is Null");
    LOGD("AudioStream is Null");
    bRetVal = -1;
    return -1;
}

audio_stream_play_received_dtmfs(veData->audioStream, true);


snprintf(rtcp_tool,sizeof(rtcp_tool)-1,"%s-%s","Android","2.8.0");

snprintf(cname,sizeof(cname)-1,"%s-%d", cremote_ip, remote_port);
ortp_warning("cname value: %s",cname);
LOGD("cname value: %s",cname);
audio_stream_prepare_sound(veData->audioStream, veData->soundCard, veData->CaptureCard);
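/* audio_stream_start_full arguments: RTP profile, remote RTP ip/port, remote RTCP
   ip/port, payload type 114, 50 ms jitter compensation, no input/output file,
   playback and capture sound cards, echo cancellation unless low-end */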

if (0 == audio_stream_start_full(veData->audioStream, veData->prof, cremote_ip, remote_port, cremote_ip, remote_port + 1, 114, 50, NULL, NULL, veData->soundCard, veData->CaptureCard, !isLowEnd))
{
    veData->rtpSession = veData->audioStream->ms.sessions.rtp_session;
    ortp_warning("AudioStreamStartFull Success");
    post_audio_config(veData->audioStream);
    audio_stream_set_rtcp_information(veData->audioStream, cname, rtcp_tool);
}
else
{
    ortp_warning("AudioStream start failed");
    bRetVal = -1;
}

// init video stream
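// note: this reuses the same local RTP/RTCP port pair as the audio stream above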
veData->videoStream = video_stream_new(msFactory, localport, localport + 1, false);

video_stream_enable_adaptive_bitrate_control(veData->videoStream, true);
video_stream_enable_adaptive_jittcomp(veData->videoStream, true);
rtp_session_enable_rtcp_mux(veData->videoStream->ms.sessions.rtp_session, true);

video_stream_use_video_preset(veData->videoStream, "custom");
video_stream_set_sent_video_size(veData->videoStream, size);
video_stream_set_preview_size(veData->videoStream, size);
video_stream_enable_self_view(veData->videoStream, TRUE);

ortp_message("Video Stream : [%p] & native window id : [%p]",veData->videoStream, veData->native_window_id);
video_stream_set_native_window_id(veData->videoStream, veData->native_window_id);

ortp_message("Video Stream : [%p] & preview window id : [%p]",veData->videoStream, veData->native_preview_window_id);
video_stream_set_native_preview_window_id(veData->videoStream, veData->native_preview_window_id);

video_stream_use_preview_video_window(veData->videoStream, TRUE);
video_stream_set_device_rotation(veData->videoStream, 0);

video_stream_set_fps(veData->videoStream, 10.0);

// link audio with video
audio_stream_link_video(veData->audioStream, veData->videoStream);

ms_message("Setting webcam as %p", veData->msWebCam);
if(bRetVal != -1 && video_stream_start(veData->videoStream, veData->prof,
                      cremote_ip,
                      remote_port,
                      cremote_ip,
                      remote_port + 1,
                      101,
                      60,
                      veData->msWebCam) >=0 ) {

    ortp_warning("VideoStream started successfully");
    veData->rtpSession = veData->videoStream->ms.sessions.rtp_session;

    video_stream_set_rtcp_information(veData->videoStream, cname,rtcp_tool);
}
else
{
    ortp_warning("VideoStream start failed");
    bRetVal = -1;

}

(*env)->ReleaseStringUTFChars(env, remote_ip, cremote_ip);


return bRetVal;
}

1 Answer


OK, finally, with help from @belledonne-communications, we found that we were sending both streams on the same local port.

That cannot work; each stream has to be sent on its own port. We corrected it and it works.
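
For illustration, the corrected layout looks roughly like the sketch below. The +2/+3 offsets and the names audio_remote_rtp / video_remote_rtp are assumptions (the actual remote ports come from signaling); the point is simply that audio and video each need their own local and remote RTP/RTCP port pair.

/* Sketch of the corrected port layout; reuses the question's variables
   (veData, msFactory, localport, cremote_ip, isLowEnd). */
int audio_local_rtp  = localport;
int audio_local_rtcp = localport + 1;
int video_local_rtp  = localport + 2;   /* must NOT reuse the audio ports */
int video_local_rtcp = localport + 3;

veData->audioStream = audio_stream_new(msFactory, audio_local_rtp, audio_local_rtcp, FALSE);
veData->videoStream = video_stream_new(msFactory, video_local_rtp, video_local_rtcp, FALSE);

/* ... configure both streams and check return values as in the question ... */

/* The remote side likewise listens on two distinct port pairs
   (audio_remote_rtp and video_remote_rtp are assumed to come from signaling). */
audio_stream_start_full(veData->audioStream, veData->prof,
                        cremote_ip, audio_remote_rtp, cremote_ip, audio_remote_rtp + 1,
                        114, 50, NULL, NULL,
                        veData->soundCard, veData->CaptureCard, !isLowEnd);

video_stream_start(veData->videoStream, veData->prof,
                   cremote_ip, video_remote_rtp, cremote_ip, video_remote_rtp + 1,
                   101, 60, veData->msWebCam);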

answered 2016-06-16T06:57:14.760