0

我正在尝试使用 AWS Kinesis 发起 WebRTC 呼叫，但 AWS Kinesis JavaScript 文档中的演示仅展示了如何以查看者（VIEWER）而非主控方（MASTER）的身份加入呼叫。

我在网上的任何地方都找不到一个明确的例子,我和我的队友花了几个小时来研究它。

我可以看到和听到自己的声音,所以我知道我的硬件工作正常,但我们看不到或听到对方的声音。我知道这将是一件简单的事情,但我就是不知道我在哪里连接错误。

const startKinesisCall = async () => {
  // Control-plane client, used here only for endpoint discovery.
  // correctClockSkew guards against SigV4 signature failures when the
  // local clock drifts from AWS time.
  const coachingSession = new AWS.KinesisVideo({
    region,
    accessKeyId,
    secretAccessKey,
    correctClockSkew: true,
  });

  // --- Get signaling channel endpoints ---
  // Each signaling channel is assigned an HTTPS and a WSS endpoint for
  // data-plane operations, discovered via GetSignalingChannelEndpoint.
  // FIX: the returned endpoints are ROLE-SPECIFIC. This side signals as
  // MASTER (see SignalingClient below), so the endpoints must also be
  // requested with Role.MASTER — the original asked for VIEWER endpoints,
  // which silently breaks master-side signaling.
  const getSignalingChannelEndpointResponse = await coachingSession
    .getSignalingChannelEndpoint({
      ChannelARN: channelARN,
      SingleMasterChannelEndpointConfiguration: {
        Protocols: ['WSS', 'HTTPS'],
        Role: Role.MASTER,
      },
    })
    .promise();

  // Index the returned endpoints by protocol ("HTTPS" / "WSS").
  const endpointsByProtocol = getSignalingChannelEndpointResponse?.ResourceEndpointList?.reduce(
    (endpoints, endpoint) => {
      endpoints[endpoint.Protocol] = endpoint?.ResourceEndpoint;
      return endpoints;
    },
    {},
  );

  // --- KVS signaling-channels client ---
  // Uses the HTTPS endpoint; only needed to fetch ICE (TURN) configuration,
  // not for the actual signaling traffic.
  const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
    region,
    accessKeyId,
    secretAccessKey,
    endpoint: endpointsByProtocol.HTTPS,
    correctClockSkew: true,
  });

  // --- ICE server configuration ---
  // The KVS STUN endpoint is always
  // stun:stun.kinesisvideo.${region}.amazonaws.com:443; TURN servers are
  // fetched via GetIceServerConfig for relay fallback.
  const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
    .getIceServerConfig({
      ChannelARN: channelARN,
    })
    .promise();
  const iceServers = [{ urls: `stun:stun.kinesisvideo.${region}.amazonaws.com:443` }];
  getIceServerConfigResponse.IceServerList.forEach(iceServer =>
    iceServers.push({
      urls: iceServer.Uris,
      username: iceServer.Username,
      credential: iceServer.Password,
    }),
  );

  console.log('ICE SERVERS: ', iceServers);

  // Primary WebRTC interface for this call.
  // NOTE(review): a general-purpose master keeps one RTCPeerConnection per
  // remote client; this code assumes exactly one patient/viewer connects.
  const peerConnection = new RTCPeerConnection({ iceServers });

  // --- WebRTC signaling client (MASTER role) ---
  // FIX: `clientId` must NOT be supplied for the MASTER role — the KVS
  // WebRTC SDK treats clientId as a viewer-only field; the master addresses
  // peers using the senderClientId carried on incoming messages.
  const signalingClient = new SignalingClient({
    channelARN,
    channelEndpoint: endpointsByProtocol.WSS,
    role: Role.MASTER,
    region,
    credentials: {
      accessKeyId,
      secretAccessKey,
    },
    systemClockOffset: coachingSession.config.systemClockOffset,
  });

  // --- Local media ---
  // May resolve to undefined when no webcam/mic is available; every
  // downstream handler guards on `localStream` before using it.
  const localStream = await navigator.mediaDevices
    .getUserMedia({
      video: true,
      audio: true,
    })
    .catch(e => {
      console.log("COULD NOT FIND WEBCAM");
      setShowErrorStartingVideoModal(true);
    });

  // --- Audio & video device collection (feeds the device-picker UI) ---
  let audioInputDevices: MediaDeviceInfo[];
  let audioOutputDevices: MediaDeviceInfo[];
  let videoInputDevices: MediaDeviceInfo[];
  try {
    const mediaDevices = await navigator.mediaDevices.enumerateDevices();
    audioInputDevices = mediaDevices.filter(device => device.kind === 'audioinput');
    audioOutputDevices = mediaDevices.filter(device => device.kind === 'audiooutput');
    videoInputDevices = mediaDevices.filter(device => device.kind === 'videoinput');
    setMicrophoneList(audioInputDevices);
    setSpeakerList(audioOutputDevices);
    setCameraList(videoInputDevices);
  } catch (e) {
    console.log(e);
    console.log("ERROR COLLECTING MEDIA DEVICE INFORMATION: MAKE SURE PERMISSIONS ARE ALLOWED AND TRY AGAIN");
  }

  // Local (provider) and remote (patient) video elements.
  const providerVideoTile: HTMLVideoElement = document.getElementById('provider-video-element') as HTMLVideoElement;
  const patientVideoElement = document.getElementById('patient-video-element') as HTMLVideoElement;

  // --- Signaling client event listeners ---
  signalingClient.on('open', async () => {
    if (!localStream || !peerConnection) return;

    // Publish the local tracks on the peer connection and show the local
    // preview tile.
    try {
      localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));
      providerVideoTile.srcObject = localStream;
    } catch (e) {
      // Could not attach webcam tracks.
      console.log(e);
      return;
    }

    // NOTE(review): in the canonical KVS flow the MASTER waits for the
    // viewer's SDP offer (handled in the 'sdpOffer' listener below) instead
    // of offering first; this proactive offer is kept for compatibility
    // with the existing mobile client — confirm it is still needed.
    const offer = await peerConnection.createOffer({
      offerToReceiveAudio: true,
      offerToReceiveVideo: true,
    });

    console.log('CREATED OFFER: ', offer);

    await peerConnection.setLocalDescription(offer);

    if (peerConnection.localDescription) signalingClient.sendSdpOffer(peerConnection.localDescription, patient.patientID);
  });

  // SDP answer from the remote peer completes our proactive offer.
  signalingClient.on('sdpAnswer', async answer => {
    console.log('RECEIVED ANSWER: ', answer);
    if (!peerConnection) return;
    await peerConnection.setRemoteDescription(answer).catch(e => console.log(e));
  });

  // SDP offer from a viewer: set it as the remote description and reply
  // with an answer addressed to that viewer (standard master behavior).
  signalingClient.on('sdpOffer', async (offer, senderClientID) => {
    console.log({ offer });
    if (!peerConnection) return;

    await peerConnection.setRemoteDescription(offer).catch(e => console.log(e));
    console.log('REMOTE DESCRIPTION SET: ', peerConnection);
    const answer = await peerConnection.createAnswer().catch(e => console.log(e));

    console.log({ answer });
    if (answer) signalingClient.sendSdpAnswer(answer, senderClientID);
  });

  // Trickle ICE: add each remote candidate to the peer connection as it
  // arrives over the signaling channel.
  signalingClient.on('iceCandidate', async (candidate, senderClientID) => {
    if (!peerConnection) return;
    console.log('new iceCandidate received:', candidate);

    await peerConnection.addIceCandidate(candidate).catch(e => console.log(e));
    console.log("ICE CANDIDATE ADDED: ", candidate);
  });

  // Tear down local media and the peer connection when signaling closes.
  signalingClient.on('close', async () => {
    if (!localStream) return;
    console.log("ENDING THE CALL");
    localStream.getTracks().forEach(track => track.stop());
    peerConnection.close();
    if ('srcObject' in providerVideoTile) providerVideoTile.srcObject = null;
  });

  signalingClient.on('error', error => {
    // Surface signaling errors; no recovery is attempted here.
    console.log(error);
  });

  // --- Application data messages delivered over the signaling channel ---
  signalingClient.on('chat', (dataMessage: any) => {
    const decodedMessage = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
    console.log("GOT TEST MESSAGE:", decodedMessage);
  });

  signalingClient.on('SeriesData', (dataMessage: any) => {
    const seriesFromMobile = JSON.parse(UTF8Decoder.decode(new Uint8Array(dataMessage.data)));
    console.log("SERIES FROM MOBILE:", seriesFromMobile);
    kickOffSeriesCreation(seriesFromMobile);
  });

  signalingClient.on('EffortMarker', (dataMessage: any) => {
    const effortMarker = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
    console.log("EFFORT MARKER:", effortMarker);
    setEffortMarker(effortMarker);
  });

  signalingClient.on('CoachingMessage', async (dataMessage: any) => {
    const coachingMessage = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
    console.log("COACHING MESSAGE FROM MOBILE:", coachingMessage);
    if (coachingMessage === 'EndSeries') {
      await handleForceEndEffort(signalingClient);
      await handleEndSeries(signalingClient);
    }
  });

  // --- Peer connection event listeners ---
  // Forward locally gathered ICE candidates to the remote peer.
  peerConnection.addEventListener('icecandidate', ({ candidate }) => {
    if (candidate) {
      console.log(candidate);
      signalingClient.sendIceCandidate(candidate, patient.patientID);
    } else {
      // A null candidate means ICE gathering is complete.
      console.log('NO MORE ICE CANDIDATES WILL BE GENERATED');
    }
  });

  // Render remote media as it arrives.
  peerConnection.addEventListener('track', event => {
    setNoPatientConnected(false);
    console.log({ event });
    // FIX: do NOT call peerConnection.addTrack() here. addTrack() attaches
    // *outgoing* local tracks (already added in the 'open' handler);
    // re-adding the incoming remote track corrupts the connection's sender
    // set. Remote tracks only need to be rendered.
    try {
      if (event.track.kind === 'video') patientVideoElement.srcObject = event.streams[0];
    } catch (e) {
      console.log(e);
    }
  });

  // Open the signaling connection; the 'open' listener above fires next.
  signalingClient.open();
};
4

2 个回答

1

试试这个页面,您可以在一台计算机上使用 master 并在另一台计算机上使用 viewer。

https://awslabs.github.io/amazon-kinesis-video-streams-webrtc-sdk-js/examples/index.html

于 2022-02-04T03:44:31.250 回答
0

对于其他有同样问题的人,我设法在这个 github repo上找到了主示例,并且能够让它工作

于 2022-02-08T12:29:42.660 回答