'How to start WebRTC call with AWS Kinesis as Master

I'm trying to start a WebRTC call with AWS Kinesis, but the demo on The AWS Kinesis Javascript docs only shows how to join the call as a VIEWER not the MASTER.

I can't find a clear example anywhere online, and I've spent hours on it with my teammate.

I can see and hear myself, so I know I'm getting the hardware working correctly, but we can't see or hear each other. I know it's going to be something simple, but I just can't figure out where I'm going wrong with the connection.

const startKinesisCall = async () => {
     const coachingSession = new AWS.KinesisVideo({
          region,
          accessKeyId,
          secretAccessKey,
          correctClockSkew: true
        });

    // Get Signaling Channel Endpoints
    // Each signaling channel is assigned an HTTPS and WSS endpoint to connect to for 
    // data-plane operations. These can be discovered using the GetSignalingChannelEndpoint API.
    const getSignalingChannelEndpointResponse = await coachingSession.getSignalingChannelEndpoint({
      ChannelARN: channelARN,
      SingleMasterChannelEndpointConfiguration: {
        Protocols: ['WSS', 'HTTPS'],
        Role: Role.VIEWER
      }
    }).promise();

    const endpointsByProtocol = getSignalingChannelEndpointResponse?.ResourceEndpointList?.reduce((endpoints, endpoint) => {
      endpoints[endpoint.Protocol] = endpoint?.ResourceEndpoint;
      return endpoints;
    }, {});

    // Create KVS Signaling Client
    // The HTTPS endpoint from the GetSignalingChannelEndpoint response is used with this client. 
    // This client is just used for getting ICE servers, not for actual signaling.
    const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
      region,
      accessKeyId,
      secretAccessKey,
      endpoint: endpointsByProtocol.HTTPS,
      correctClockSkew: true,
    });

    // Get ICE server configuration
    // For best performance, we collect STUN and TURN ICE server configurations. 
    // The KVS STUN endpoint is always stun:stun.kinesisvideo.${region}.amazonaws.com:443. 
    // To get TURN servers, the GetIceServerConfig API is used.
    const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
      .getIceServerConfig({
        ChannelARN: channelARN,
      }).promise();
    const iceServers = [{ urls: `stun:stun.kinesisvideo.${region}.amazonaws.com:443` }];
    getIceServerConfigResponse.IceServerList.forEach(iceServer =>
      iceServers.push({
        urls: iceServer.Uris,
        username: iceServer.Username,
        credential: iceServer.Password,
      }),
    );

    console.log('ICE SERVERS: ', iceServers);

    // Create RTCPeerConnection
    // The RTCPeerConnection is the primary interface for WebRTC communications in the Web.
    const peerConnection = new RTCPeerConnection({ iceServers });

    // Create WebRTC Signaling Client
    // This is the actual client that is used to send messages over the signaling channel.
    const signalingClient = new SignalingClient({
      channelARN,
      channelEndpoint: endpointsByProtocol.WSS,
      role: Role.MASTER,
      region,
      clientId,
      credentials: {
        accessKeyId,
        secretAccessKey,
      },
      systemClockOffset: coachingSession.config.systemClockOffset
    });

    // GET THE USER MEDIA DEVICES
    const localStream = await navigator.mediaDevices.getUserMedia({
      video: true,
      audio: true
    }).catch(e => {
      console.log("COULD NOT FIND WEBCAM");
      setShowErrorStartingVideoModal(true);
    });

    // *** AUDIO & VIDEO DEVICE COLLECTION ***
    let audioInputDevices: MediaDeviceInfo[];
    let audioOutputDevices: MediaDeviceInfo[];
    let videoInputDevices: MediaDeviceInfo[];
    try {
      const mediaDevices = await navigator.mediaDevices.enumerateDevices();
      audioInputDevices = mediaDevices.filter(device => device.kind === 'audioinput');
      audioOutputDevices = mediaDevices.filter(device => device.kind === 'audiooutput');
      videoInputDevices = mediaDevices.filter(device => device.kind === 'videoinput');
      setMicrophoneList(audioInputDevices);
      setSpeakerList(audioOutputDevices);
      setCameraList(videoInputDevices);
    } catch (e) {
      console.log(e);
      console.log("ERROR COLLECTING MEDIA DEVICE INFORMATION: MAKE SURE PERMISSIONS ARE ALLOWED AND TRY AGAIN");
    };

    // GRAB THE LOCAL PROVIDER AND PATIENT VIDEO TILES
    const providerVideoTile: HTMLVideoElement = document.getElementById('provider-video-element') as HTMLVideoElement;
    const patientVideoElement = document.getElementById('patient-video-element') as HTMLVideoElement;
    // let dataChannel: RTCDataChannel
    // Add Signaling Client Event Listeners
    signalingClient.on('open', async () => {
      if (!localStream || !peerConnection) return;

      // Get a stream from the webcam, add it to the peer connection, and display it in the local view
      try {
        localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));
        providerVideoTile.srcObject = localStream;
      } catch (e) {
        // Could not find webcam
        console.log(e);
        return;
      };

      // Create an SDP offer and send it to the master
      const offer = await peerConnection.createOffer({
        offerToReceiveAudio: true,
        offerToReceiveVideo: true
      });

      console.log('CREATED OFFER: ', offer);

      await peerConnection.setLocalDescription(offer);

      if (peerConnection.localDescription) signalingClient.sendSdpOffer(peerConnection.localDescription, patient.patientID);
    });

    // When the SDP answer is received back from the master, add it to the peer connection.
    signalingClient.on('sdpAnswer', async answer => {
      console.log('RECEIVED ANSWER: ', answer);
      if (!peerConnection) return;
      await peerConnection.setRemoteDescription(answer).catch(e => console.log(e));
    });

    signalingClient.on('sdpOffer', async (offer, senderClientID) => {
      console.log({ offer });
      if (!peerConnection) return;

      await peerConnection.setRemoteDescription(offer).catch(e => console.log(e));
      console.log('REMOTE DESCRIPTION SET: ', peerConnection);
      const answer = await peerConnection.createAnswer().catch(e => console.log(e));

      console.log({ answer });
      if (answer) signalingClient.sendSdpAnswer(answer, senderClientID);

      // dataChannel = peerConnection.createDataChannel(`data-channel-of-${senderClientID}`);
      // dataChannel.addEventListener("open", (event) => {
      //   console.log(event);
      //   dataChannel.send('******HI ALEC*******');
      // });
    });

    // When an ICE candidate is received from the master, add it to the peer connection.
    signalingClient.on('iceCandidate', async (candidate, senderClientID) => {
      if (!peerConnection) return;
      console.log('new iceCandidate received:', candidate);

      await peerConnection.addIceCandidate(candidate).catch(e => console.log(e));
      console.log("ICE CANDIDATE ADDED: ", candidate);
    });


    signalingClient.on('close', async () => {
      if (!localStream) return;
      // Handle client closures
      console.log("ENDING THE CALL");
      localStream.getTracks().forEach(track => track.stop());
      peerConnection.close();
      if ('srcObject' in providerVideoTile) providerVideoTile.srcObject = null;
    });

    signalingClient.on('error', error => {
      // Handle client errors
      console.log(error);
    });

    signalingClient.on('chat', (dataMessage: any) => {
      const decodedMessage = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
      console.log("GOT TEST MESSAGE:", decodedMessage);
    });

    signalingClient.on('SeriesData', (dataMessage: any) => {
      const seriesFromMobile = JSON.parse(UTF8Decoder.decode(new Uint8Array(dataMessage.data)));
      console.log("SERIES FROM MOBILE:", seriesFromMobile);
      kickOffSeriesCreation(seriesFromMobile);
    });

    signalingClient.on('EffortMarker', (dataMessage: any) => {
      const effortMarker = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
      console.log("EFFORT MARKER:", effortMarker);
      setEffortMarker(effortMarker);
    });

    signalingClient.on('CoachingMessage', async (dataMessage: any) => {
      const coachingMessage = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
      console.log("COACHING MESSAGE FROM MOBILE:", coachingMessage);
      if (coachingMessage === 'EndSeries') {
        await handleForceEndEffort(signalingClient);
        await handleEndSeries(signalingClient);
      };
    });

    // Add Peer Connection Event Listeners
    // Send any ICE candidates generated by the peer connection to the other peer
    peerConnection.addEventListener('icecandidate', ({ candidate }) => {
      if (candidate) {
        console.log(candidate);
        signalingClient.sendIceCandidate(candidate, patient.patientID);
      } else {
        // No more ICE candidates will be generated
        console.log('NO MORE ICE CANDIDATES WILL BE GENERATED');
      }
    });

    // As remote tracks are received, add them to the remote view
    peerConnection.addEventListener('track', event => {
      // if (patientVideoElement.srcObject) return;
      setNoPatientConnected(false);
      console.log({ event });
      try {
        peerConnection.addTrack(event.track, event.streams[0]);
        if (event.track.kind === 'video') patientVideoElement.srcObject = event.streams[0];
      } catch (e) {
        console.log(e);
      }
    });

    // Open Signaling Connection
    signalingClient.open();
  };


Solution 1:[1]

Try this this page, You can use master on one computer and viewer on other.

https://awslabs.github.io/amazon-kinesis-video-streams-webrtc-sdk-js/examples/index.html

Solution 2:[2]

For anyone else with the same issue, I managed to find the master example on this github repo and was able to get it working

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1 Delighteck
Solution 2 Dru Serkes