function setupWebRTC()

in static/js/chat.js [85:157]


function setupWebRTC(iceServerUrl, iceServerUsername, iceServerCredential) {
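    // Note: the ICE URL/username/credential passed in here are typically the short-lived TURN relay
    // values returned by the Speech avatar relay-token endpoint (see the usage sketch after the function).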
    peerConnection = new RTCPeerConnection({
        iceServers: [{
            urls: [iceServerUrl],
            username: iceServerUsername,
            credential: iceServerCredential
        }]
    });
    
    peerConnection.ontrack = function(event) {
        if (event.track.kind === 'audio') {
            let audioElement = document.createElement('audio');
            audioElement.id = 'audioPlayer';
            audioElement.srcObject = event.streams[0];
            audioElement.autoplay = true;

            // Attach debugging event listeners
            audioElement.onplay = () => console.log("Audio element started playing");
            audioElement.onpause = () => console.log("Audio element paused");
            audioElement.onended = () => console.log("Audio playback ended");
            audioElement.onerror = (e) => console.error("Audio element error:", e);

            console.log("WebRTC audio connected.");
            const container = document.getElementById('remoteVideo');
            container.querySelectorAll('audio').forEach(el => el.remove());
            container.appendChild(audioElement);
        }

        if (event.track.kind === 'video') {
            let videoElement = document.createElement('video');
            videoElement.id = 'videoPlayer';
            videoElement.srcObject = event.streams[0];
            videoElement.autoplay = true;
            videoElement.playsInline = true;
            videoElement.muted = true; // Mute video to allow autoplay without user gesture
            videoElement.onplaying = () => {
                const container = document.getElementById('remoteVideo');
                container.querySelectorAll('video').forEach(el => el.remove());
                container.appendChild(videoElement);
                console.log("WebRTC video connected.");
                // Enable microphone (startRecording button)
                document.getElementById('startRecording').disabled = false;
                sessionActive = true;
            };
            videoElement.play().catch(e => console.error("Error playing video: ", e));
        }
    };
    
    // Add one audio and one video transceiver so the offer includes tracks to receive the avatar's media
    peerConnection.addTransceiver('video', { direction: 'sendrecv' });
    peerConnection.addTransceiver('audio', { direction: 'sendrecv' });
    
    // Start the avatar (which establishes the connection)
    avatarSynthesizer.startAvatarAsync(peerConnection)
    .then((r) => {
        if (r.reason === SpeechSDK.ResultReason.SynthesizingAudioCompleted) {
            console.log("Avatar started. Result ID: " + r.resultId);
        } else {
            console.log("Avatar failed to start. Reason: " + r.errorDetails || r.reason);
            console.log("Result ID: " + r.resultId);
            document.getElementById('startSession').disabled = false;
        }
    })
    .catch((error) => {
        console.error("Avatar start error: ", error);
        if (error instanceof SpeechSDK.SpeechSynthesisResult) {
            console.error("Error details: " + error.errorDetails);
        } else if (error instanceof Error) {
            console.error("Error message: " + error.message);
        }
        document.getElementById('startSession').disabled = false;
    });
}
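
How this function obtains its ICE parameters is not shown above. Below is a minimal sketch of a caller, following the pattern used in Microsoft's avatar browser samples; the relay-token endpoint usage, the response field names (Urls, Username, Password), and the startSessionClicked handler name are assumptions rather than code from chat.js, and avatarSynthesizer is assumed to have been created beforehand.

// Hypothetical "Start Session" handler: fetch short-lived TURN relay credentials for the avatar
// service, then hand them to setupWebRTC(). Assumes avatarSynthesizer already exists.
async function startSessionClicked() {
    document.getElementById('startSession').disabled = true;

    const region = 'westus2';                    // assumption: your Speech resource region
    const subscriptionKey = 'YOUR_SPEECH_KEY';   // assumption: proxy this server-side in production

    const response = await fetch(
        `https://${region}.tts.speech.microsoft.com/cognitiveservices/avatar/relay/token/v1`,
        { headers: { 'Ocp-Apim-Subscription-Key': subscriptionKey } }
    );
    const relayToken = await response.json();

    // The relay token carries TURN URLs plus a username/password pair that expire quickly,
    // so fetch them right before opening the peer connection.
    setupWebRTC(relayToken.Urls[0], relayToken.Username, relayToken.Password);
}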