var RtcSession = function()

in monitoring-stack-cdk/resources/frontend/connect-rtc.js [9455:10420]


    var RtcSession = function () {
        /**
         * Build an AmazonConnect RTC session.
         * @param {*} signalingUri
         * @param {*} iceServers Array of ice servers
         * @param {*} contactToken A string representing the contact token (optional)
         * @param {*} logger An object provides logging functions, such as console
         * @param {*} contactId Must be UUID, uniquely identifies the session.
         */
        function RtcSession(signalingUri, iceServers, contactToken, logger, contactId, connectionId, wssManager) {
            (0, _classCallCheck3.default)(this, RtcSession);

            // --- argument validation (fail fast on unusable inputs) ---
            if (typeof signalingUri !== 'string' || signalingUri.trim().length === 0) {
                throw new _exceptions.IllegalParameters('signalingUri required');
            }
            if (!iceServers) {
                throw new _exceptions.IllegalParameters('iceServers required');
            }
            // Babel-expanded `typeof logger !== 'object'` check; any object with
            // logging methods (e.g. console) is accepted.
            if ((typeof logger === 'undefined' ? 'undefined' : (0, _typeof3.default)(logger)) !== 'object') {
                throw new _exceptions.IllegalParameters('logger required');
            }
            // Use the caller-supplied contactId as the call id when present,
            // otherwise generate a fresh UUID (_v2 is presumably a uuid factory).
            if (!contactId) {
                this._callId = (0, _v2.default)();
            } else {
                this._callId = contactId;
            }
            this._connectionId = connectionId;
            this._wssManager = wssManager;
            this._sessionReport = new _session_report.SessionReport();
            this._signalingUri = signalingUri;
            this._iceServers = iceServers;
            this._contactToken = contactToken;
            // Keep the raw logger for sub-components; wrap it with call-id context
            // for this session's own log lines.
            this._originalLogger = logger;
            this._logger = (0, _utils.wrapLogger)(this._originalLogger, this._callId, 'SESSION');
            this._iceTimeoutMillis = _rtc_const.DEFAULT_ICE_TIMEOUT_MS;
            this._gumTimeoutMillis = _rtc_const.DEFAULT_GUM_TIMEOUT_MS;

            // Default media profile: audio-only, front-facing camera if video is
            // enabled later via setters.
            this._enableAudio = true;
            this._enableVideo = false;
            this._facingMode = 'user';
            this._legacyStatsReportSupport = false;
            /**
             * user may provide the stream to the RtcSession directly to connect to the other end.
             * user may also acquire the stream from the local device.
             * This flag is used to track where the stream is acquired.
             * If it's acquired from local devices, then we must close the stream when the session ends.
             * If it's provided by user (rather than local camera/microphone), then we should leave it open when the
             * session ends.
             */
            this._userProvidedStream = false;

            // Initialize every lifecycle callback to a shared no-op so callers can
            // assign only the ones they care about.
            this._onGumError = this._onGumSuccess = this._onLocalStreamAdded = this._onSessionFailed = this._onSessionInitialized = this._onSignalingConnected = this._onIceCollectionComplete = this._onSignalingStarted = this._onSessionConnected = this._onRemoteStreamAdded = this._onSessionCompleted = this._onSessionDestroyed = function () {};
        }
    
        (0, _createClass3.default)(RtcSession, [{
            key: 'pauseLocalVideo',
            value: function pauseLocalVideo() {
                if (this._localStream) {
                    var videoTrack = this._localStream.getVideoTracks()[0];
                    if (videoTrack) {
                        videoTrack.enabled = false;
                    }
                }
            }
        }, {
            key: 'resumeLocalVideo',
            value: function resumeLocalVideo() {
                if (this._localStream) {
                    var videoTrack = this._localStream.getVideoTracks()[0];
                    if (videoTrack) {
                        videoTrack.enabled = true;
                    }
                }
            }
        }, {
            key: 'pauseRemoteVideo',
            value: function pauseRemoteVideo() {
                if (this._remoteVideoStream) {
                    var videoTrack = this._remoteVideoStream.getTracks()[1];
                    if (videoTrack) {
                        videoTrack.enabled = false;
                    }
                }
            }
        }, {
            key: 'resumeRemoteVideo',
            value: function resumeRemoteVideo() {
                if (this._remoteVideoStream) {
                    var videoTrack = this._remoteVideoStream.getTracks()[1];
                    if (videoTrack) {
                        videoTrack.enabled = true;
                    }
                }
            }
        }, {
            key: 'pauseLocalAudio',
            value: function pauseLocalAudio() {
                if (this._localStream) {
                    var audioTrack = this._localStream.getAudioTracks()[0];
                    if (audioTrack) {
                        audioTrack.enabled = false;
                    }
                }
            }
        }, {
            key: 'resumeLocalAudio',
            value: function resumeLocalAudio() {
                if (this._localStream) {
                    var audioTrack = this._localStream.getAudioTracks()[0];
                    if (audioTrack) {
                        audioTrack.enabled = true;
                    }
                }
            }
        }, {
            key: 'pauseRemoteAudio',
            value: function pauseRemoteAudio() {
                if (this._remoteAudioStream) {
                    var audioTrack = this._remoteAudioStream.getTracks()[0];
                    if (audioTrack) {
                        audioTrack.enabled = false;
                    }
                }
            }
        }, {
            key: 'resumeRemoteAudio',
            value: function resumeRemoteAudio() {
                if (this._remoteAudioStream) {
                    var audioTrack = this._remoteAudioStream.getTracks()[0];
                    if (audioTrack) {
                        audioTrack.enabled = true;
                    }
                }
            }
            /**
             * NOTE(review): this JSDoc describes the onGumSuccess callback property
             * (invoked when getUserMedia succeeds, with the RtcSession object as its
             * first parameter) — it does not document the `transit` method that
             * happens to follow it in this generated bundle.
             */
    
        }, {
            key: 'transit',
            // State-machine transition: exit the current state, install the next
            // one, then enter it. The finally block guarantees _state is updated
            // even if onExit throws, so the session can never be left pointing at
            // an exited state.
            value: function transit(nextState) {
                try {
                    this._logger.info((this._state ? this._state.name : 'null') + ' => ' + nextState.name);
                    if (this._state && this._state.onExit) {
                        this._state.onExit();
                    }
                } finally {
                    this._state = nextState;
                    if (nextState.onEnter) {
                        try {
                            // onEnter failures are logged, then rethrown (possibly
                            // masking an in-flight onExit exception — hence the
                            // eslint suppression below).
                            nextState.onEnter();
                        } catch (e) {
                            this._logger.warn(nextState.name + '#onEnter failed', e);
                            throw e; // eslint-disable-line no-unsafe-finally
                        }
                    }
                }
            }
        }, {
            key: '_createSignalingChannel',
            value: function _createSignalingChannel() {
                var signalingChannel = new _signaling2.default(this._callId, this._signalingUri, this._contactToken, this._originalLogger, this._signalingConnectTimeout, this._connectionId, this._wssManager);
                signalingChannel.onConnected = (0, _utils.hitch)(this, this._signalingConnected);
                signalingChannel.onAnswered = (0, _utils.hitch)(this, this._signalingAnswered);
                signalingChannel.onHandshaked = (0, _utils.hitch)(this, this._signalingHandshaked);
                signalingChannel.onRemoteHungup = (0, _utils.hitch)(this, this._signalingRemoteHungup);
                signalingChannel.onFailed = (0, _utils.hitch)(this, this._signalingFailed);
                signalingChannel.onDisconnected = (0, _utils.hitch)(this, this._signalingDisconnected);
    
                this._signalingChannel = signalingChannel;
    
                return signalingChannel;
            }
        }, {
            // The six handlers below simply forward signaling-channel events to the
            // current state object, which decides what each event means at this
            // point in the call lifecycle.
            key: '_signalingConnected',
            value: function _signalingConnected() {
                this._state.onSignalingConnected();
            }
        }, {
            key: '_signalingAnswered',
            // sdp: remote session description; candidates: remote ICE candidates.
            value: function _signalingAnswered(sdp, candidates) {
                this._state.onSignalingAnswered(sdp, candidates);
            }
        }, {
            key: '_signalingHandshaked',
            value: function _signalingHandshaked() {
                this._state.onSignalingHandshaked();
            }
        }, {
            key: '_signalingRemoteHungup',
            value: function _signalingRemoteHungup() {
                this._state.onRemoteHungup();
            }
        }, {
            key: '_signalingFailed',
            value: function _signalingFailed(e) {
                this._state.onSignalingFailed(e);
            }
        }, {
            key: '_signalingDisconnected',
            // Intentionally a no-op: disconnects after hangup/teardown need no action.
            value: function _signalingDisconnected() {}
        }, {
            key: '_createPeerConnection',
            value: function _createPeerConnection(configuration) {
                return new RTCPeerConnection(configuration);
            }
        }, {
            key: 'connect',
            // Start the session: record timing, build the peer connection, wire
            // its events, then kick off the state machine with media acquisition.
            value: function connect() {
                var self = this;
                var now = new Date();
                self._sessionReport.sessionStartTime = now;
                self._connectTimeStamp = now.getTime();

                // Second argument is legacy goog-prefixed constraints (DSCP marking).
                // NOTE(review): _createPeerConnection is declared with a single
                // parameter, so this argument is dropped unless its signature
                // forwards it — confirm intended behavior.
                self._pc = self._createPeerConnection({
                    iceServers: self._iceServers,
                    iceTransportPolicy: 'relay',
                    rtcpMuxPolicy: 'require',
                    bundlePolicy: 'balanced',
                    sdpSemantics: 'plan-b'
                }, {
                    optional: [{
                        googDscp: true
                    }]
                });

                // Bind peer-connection events back into the session state machine.
                self._pc.ontrack = (0, _utils.hitch)(self, self._ontrack);
                self._pc.onicecandidate = (0, _utils.hitch)(self, self._onIceCandidate);
                self._pc.oniceconnectionstatechange = (0, _utils.hitch)(self, self._onIceStateChange);

                // Detect whether this browser still uses the callback-based
                // getStats API before entering the first state.
                (0, _utils.isLegacyStatsReportSupported)(self._pc).then(function (result) {
                    self._legacyStatsReportSupport = result;
                    self.transit(new GrabLocalMediaState(self));
                });
            }
        }, {
            key: 'accept',
            // Inbound accept is not implemented; sessions are only initiated
            // locally via connect().
            value: function accept() {
                throw new _exceptions.UnsupportedOperation('accept does not go through signaling channel at this moment');
            }
        }, {
            key: 'hangup',
            // Delegate to the current state; what "hangup" does depends on how far
            // the call has progressed.
            value: function hangup() {
                this._state.hangup();
            }
    
            /**
             * Get a promise containing an object with two named lists of stats ('input' and
             * 'output', one per channel) for each media type ('audio' and 'video').
             * @return Rejected promise if failed to get MediaRtpStats. The promise is never resolved with null value.
             */
    
        }, {
            key: 'getStats',
            // Regenerator-transpiled async method. Collects per-track RTP stats for
            // the four directions (remote/local x audio/video), shapes them into
            // {audio: {input, output}, video: {input, output}}, then coalesces the
            // round-trip time measured on the inbound legs onto the outbound stats.
            value: function () {
                var _ref = (0, _asyncToGenerator3.default)( /*#__PURE__*/_regenerator2.default.mark(function _callee3() {
                    var _this11 = this;

                    var timestamp, impl, statsResult, rttReducer, audioInputRttMilliseconds, videoInputRttMilliseconds;
                    return _regenerator2.default.wrap(function _callee3$(_context3) {
                        while (1) {
                            switch (_context3.prev = _context3.next) {
                                case 0:
                                    timestamp = new Date();

                                    // impl(stream, streamType): resolve stats for every track of the
                                    // requested kind on the given stream; resolves to [] when the
                                    // stream is absent, throws on an unknown streamType.
                                    impl = function () {
                                        var _ref2 = (0, _asyncToGenerator3.default)( /*#__PURE__*/_regenerator2.default.mark(function _callee2(stream, streamType) {
                                            var tracks;
                                            return _regenerator2.default.wrap(function _callee2$(_context2) {
                                                while (1) {
                                                    switch (_context2.prev = _context2.next) {
                                                        case 0:
                                                            tracks = [];

                                                            if (stream) {
                                                                _context2.next = 3;
                                                                break;
                                                            }

                                                            return _context2.abrupt('return', []);

                                                        case 3:
                                                            // Transpiled switch over streamType: audio_* -> audio tracks,
                                                            // video_* -> video tracks, anything else -> throw.
                                                            _context2.t0 = streamType;
                                                            _context2.next = _context2.t0 === 'audio_input' ? 6 : _context2.t0 === 'audio_output' ? 6 : _context2.t0 === 'video_input' ? 8 : _context2.t0 === 'video_output' ? 8 : 10;
                                                            break;

                                                        case 6:
                                                            tracks = stream.getAudioTracks();
                                                            return _context2.abrupt('break', 11);

                                                        case 8:
                                                            tracks = stream.getVideoTracks();
                                                            return _context2.abrupt('break', 11);

                                                        case 10:
                                                            throw new Error('Unsupported stream type while trying to get stats: ' + streamType);

                                                        case 11:
                                                            _context2.next = 13;
                                                            return Promise.all(tracks.map(function () {
                                                                var _ref3 = (0, _asyncToGenerator3.default)( /*#__PURE__*/_regenerator2.default.mark(function _callee(track) {
                                                                    var self;
                                                                    return _regenerator2.default.wrap(function _callee$(_context) {
                                                                        while (1) {
                                                                            switch (_context.prev = _context.next) {
                                                                                case 0:
                                                                                    // Legacy path: callback-based getStats(selector) whose
                                                                                    // report exposes .result(); wrapped in a Promise.
                                                                                    if (!_this11._legacyStatsReportSupport) {
                                                                                        _context.next = 5;
                                                                                        break;
                                                                                    }

                                                                                    self = _this11;
                                                                                    return _context.abrupt('return', new Promise(function (resolve) {
                                                                                        self._pc.getStats(function (rawStats) {
                                                                                            var digestedStats = (0, _rtpStats.extractMediaStatsFromStats)(timestamp, rawStats.result(), streamType);
                                                                                            if (!digestedStats) {
                                                                                                throw new Error('Failed to extract MediaRtpStats from RTCStatsReport for stream type ' + streamType);
                                                                                            }
                                                                                            resolve(digestedStats);
                                                                                        }, track);
                                                                                    }));

                                                                                case 5:
                                                                                    // Spec path: promise-based getStats() returning an RTCStatsReport.
                                                                                    return _context.abrupt('return', _this11._pc.getStats().then(function (rawStats) {
                                                                                        var digestedStats = (0, _rtpStats.extractMediaStatsFromStats)(timestamp, rawStats, streamType);
                                                                                        if (!digestedStats) {
                                                                                            throw new Error('Failed to extract MediaRtpStats from RTCStatsReport for stream type ' + streamType);
                                                                                        }
                                                                                        return digestedStats;
                                                                                    }));

                                                                                case 6:
                                                                                case 'end':
                                                                                    return _context.stop();
                                                                            }
                                                                        }
                                                                    }, _callee, _this11);
                                                                }));

                                                                return function (_x3) {
                                                                    return _ref3.apply(this, arguments);
                                                                };
                                                            }()));

                                                        case 13:
                                                            return _context2.abrupt('return', _context2.sent);

                                                        case 14:
                                                        case 'end':
                                                            return _context2.stop();
                                                    }
                                                }
                                            }, _callee2, _this11);
                                        }));

                                        return function impl(_x, _x2) {
                                            return _ref2.apply(this, arguments);
                                        };
                                    }();

                                    // Stats are only meaningful once the SDP handshake has completed
                                    // (signalingState 'stable'); otherwise reject with IllegalState.
                                    if (!(this._pc && this._pc.signalingState === 'stable')) {
                                        _context3.next = 26;
                                        break;
                                    }

                                    _context3.next = 5;
                                    return impl(this._remoteAudioStream, 'audio_input');

                                case 5:
                                    _context3.t0 = _context3.sent;
                                    _context3.next = 8;
                                    return impl(this._localStream, 'audio_output');

                                case 8:
                                    _context3.t1 = _context3.sent;
                                    _context3.t2 = {
                                        input: _context3.t0,
                                        output: _context3.t1
                                    };
                                    _context3.next = 12;
                                    return impl(this._remoteVideoStream, 'video_input');

                                case 12:
                                    _context3.t3 = _context3.sent;
                                    _context3.next = 15;
                                    return impl(this._localStream, 'video_output');

                                case 15:
                                    _context3.t4 = _context3.sent;
                                    _context3.t5 = {
                                        input: _context3.t3,
                                        output: _context3.t4
                                    };
                                    statsResult = {
                                        audio: _context3.t2,
                                        video: _context3.t5
                                    };

                                    // For consistency's sake, coalesce rttMilliseconds into the output for audio and video.
                                    // Takes the max RTT across input channels while clearing each
                                    // channel's backing field as a side effect.
                                    // NOTE(review): reads stats.rttMilliseconds but clears
                                    // stats._rttMilliseconds — presumably the former is a getter over
                                    // the latter; confirm against MediaRtpStats.
                                    rttReducer = function rttReducer(acc, stats) {
                                        if (stats.rttMilliseconds !== null && (acc === null || stats.rttMilliseconds > acc)) {
                                            acc = stats.rttMilliseconds;
                                        }
                                        stats._rttMilliseconds = null;
                                        return acc;
                                    };

                                    audioInputRttMilliseconds = statsResult.audio.input.reduce(rttReducer, null);
                                    videoInputRttMilliseconds = statsResult.video.input.reduce(rttReducer, null);


                                    if (audioInputRttMilliseconds !== null) {
                                        statsResult.audio.output.forEach(function (stats) {
                                            stats._rttMilliseconds = audioInputRttMilliseconds;
                                        });
                                    }

                                    if (videoInputRttMilliseconds !== null) {
                                        statsResult.video.output.forEach(function (stats) {
                                            stats._rttMilliseconds = videoInputRttMilliseconds;
                                        });
                                    }

                                    return _context3.abrupt('return', statsResult);

                                case 26:
                                    return _context3.abrupt('return', Promise.reject(new _exceptions.IllegalState()));

                                case 27:
                                case 'end':
                                    return _context3.stop();
                            }
                        }
                    }, _callee3, this);
                }));

                function getStats() {
                    return _ref.apply(this, arguments);
                }

                return getStats;
            }()
    
            /**
             * Get a promise of MediaRtpStats object for remote audio (from Amazon Connect to client).
             * @return Rejected promise if failed to get MediaRtpStats. The promise is never resolved with null value.
             * @deprecated in favor of getStats()
             */
    
        }, {
            key: 'getRemoteAudioStats',
            value: function getRemoteAudioStats() {
                return this.getStats().then(function (stats) {
                    if (stats.audio.output.length > 0) {
                        return stats.audio.output[0];
                    } else {
                        return Promise.reject(new _exceptions.IllegalState());
                    }
                });
            }
    
            /**
             * Get a promise of MediaRtpStats object for user audio (from client to Amazon Connect).
             * @return Rejected promise if failed to get MediaRtpStats. The promise is never resolved with null value.
             * @deprecated in favor of getStats()
             */
    
        }, {
            key: 'getUserAudioStats',
            value: function getUserAudioStats() {
                return this.getStats().then(function (stats) {
                    if (stats.audio.input.length > 0) {
                        return stats.audio.input[0];
                    } else {
                        return Promise.reject(new _exceptions.IllegalState());
                    }
                });
            }
    
            /**
             * Get a promise of MediaRtpStats object for remote video (from Amazon Connect to client).
             * @return Rejected promise if failed to get MediaRtpStats. The promise is never resolved with null value.
             * @deprecated in favor of getStats()
             */
    
        }, {
            key: 'getRemoteVideoStats',
            value: function getRemoteVideoStats() {
                return this.getStats().then(function (stats) {
                    if (stats.video.output.length > 0) {
                        return stats.video.output[0];
                    } else {
                        return Promise.reject(new _exceptions.IllegalState());
                    }
                });
            }
    
            /**
             * Get a promise of MediaRtpStats object for user video (from client to Amazon Connect).
             * @return Rejected promise if failed to get MediaRtpStats. The promise is never resolved with null value.
             * @deprecated in favor of getStats()
             */
    
        }, {
            key: 'getUserVideoStats',
            value: function getUserVideoStats() {
                return this.getStats().then(function (stats) {
                    if (stats.video.input.length > 0) {
                        return stats.video.input[0];
                    } else {
                        return Promise.reject(new _exceptions.IllegalState());
                    }
                });
            }
        }, {
            key: '_onIceCandidate',
            // Forward ICE candidate events to the current state (which gathers and
            // transmits candidates over signaling as appropriate).
            value: function _onIceCandidate(evt) {
                this._state.onIceCandidate(evt);
            }
        }, {
            key: '_onIceStateChange',
            // Forward ICE connection state changes to the current state.
            value: function _onIceStateChange(evt) {
                this._state.onIceStateChange(evt);
            }
    
            /**
             * Attach remote media stream to web element.
             */
    
        }, {
            key: '_ontrack',
            value: function _ontrack(evt) {
                if (evt.streams.length > 1) {
                    this._logger.warn('Found more than 1 streams for ' + evt.track.kind + ' track ' + evt.track.id + ' : ' + evt.streams.map(function (stream) {
                        return stream.id;
                    }).join(','));
                }
                if (evt.track.kind === 'video' && this._remoteVideoElement) {
                    this._remoteVideoElement.srcObject = evt.streams[0];
                    this._remoteVideoStream = evt.streams[0];
                } else if (evt.track.kind === 'audio' && this._remoteAudioElement) {
                    this._remoteAudioElement.srcObject = evt.streams[0];
                    this._remoteAudioStream = evt.streams[0];
                }
                this._onRemoteStreamAdded(this, evt.streams[0]);
            }
        }, {
            key: '_detachMedia',
            value: function _detachMedia() {
                if (this._remoteVideoElement) {
                    this._remoteVideoElement.srcObject = null;
                }
                if (this._remoteAudioElement) {
                    this._remoteAudioElement.srcObject = null;
                    this._remoteAudioStream = null;
                }
            }
        }, {
            key: '_stopSession',
            // Tear down local media and the peer connection. Streams acquired from
            // local devices are closed; user-provided streams are left open (see the
            // _userProvidedStream note in the constructor). The nested try/finally
            // guarantees the peer connection is closed and nulled even if closing
            // the stream throws, and any close() error is deliberately swallowed.
            value: function _stopSession() {
                try {
                    if (this._localStream && !this._userProvidedStream) {
                        (0, _utils.closeStream)(this._localStream);
                        this._localStream = null;
                        this._userProvidedStream = false;
                    }
                } finally {
                    try {
                        if (this._pc) {
                            this._pc.close();
                        }
                    } catch (e) {
                        // eat exception
                    } finally {
                        this._pc = null;
                    }
                }
            }
        }, {
            key: '_buildMediaConstraints',
            value: function _buildMediaConstraints() {
                var self = this;
                var mediaConstraints = {};
    
                if (self._enableAudio) {
                    var audioConstraints = {};
                    if (typeof self._echoCancellation !== 'undefined') {
                        audioConstraints.echoCancellation = !!self._echoCancellation;
                    }
                    if (Object.keys(audioConstraints).length > 0) {
                        mediaConstraints.audio = audioConstraints;
                    } else {
                        mediaConstraints.audio = true;
                    }
                } else {
                    mediaConstraints.audio = false;
                }
    
                if (self._enableVideo) {
                    // Tail of the getUserMedia constraint builder: assemble the
                    // `video` member of mediaConstraints from the optional
                    // width/height/frameRate/facingMode settings stored on `self`.
                    var videoConstraints = {};
                    var widthConstraints = {};
                    var heightConstraints = {};
                    var frameRateConstraints = {};

                    //build video width constraints
                    if (typeof self._idealVideoWidth !== 'undefined') {
                        widthConstraints.ideal = self._idealVideoWidth;
                    }
                    if (typeof self._maxVideoWidth !== 'undefined') {
                        widthConstraints.max = self._maxVideoWidth;
                    }
                    if (typeof self._minVideoWidth !== 'undefined') {
                        widthConstraints.min = self._minVideoWidth;
                    }
                    // build video height constraints
                    if (typeof self._idealVideoHeight !== 'undefined') {
                        heightConstraints.ideal = self._idealVideoHeight;
                    }
                    if (typeof self._maxVideoHeight !== 'undefined') {
                        heightConstraints.max = self._maxVideoHeight;
                    }
                    if (typeof self._minVideoHeight !== 'undefined') {
                        heightConstraints.min = self._minVideoHeight;
                    }
                    // NOTE(review): width/height constraints are only attached when
                    // BOTH groups are non-empty; a width-only or height-only
                    // configuration is silently dropped — confirm this is intentional.
                    if (Object.keys(widthConstraints).length > 0 && Object.keys(heightConstraints).length > 0) {
                        videoConstraints.width = widthConstraints;
                        videoConstraints.height = heightConstraints;
                    }
                    // build frame rate constraints
                    if (typeof self._videoFrameRate !== 'undefined') {
                        frameRateConstraints.ideal = self._videoFrameRate;
                    }
                    if (typeof self._minVideoFrameRate !== 'undefined') {
                        frameRateConstraints.min = self._minVideoFrameRate;
                    }
                    if (typeof self._maxVideoFrameRate !== 'undefined') {
                        frameRateConstraints.max = self._maxVideoFrameRate;
                    }
                    if (Object.keys(frameRateConstraints).length > 0) {
                        videoConstraints.frameRate = frameRateConstraints;
                    }

                    // build facing mode constraints
                    // Anything other than 'user'/'environment' is coerced to 'user'.
                    // Side effect: this also rewrites self._facingMode itself.
                    if (self._facingMode !== 'user' && self._facingMode !== "environment") {
                        self._facingMode = 'user';
                    }
                    videoConstraints.facingMode = self._facingMode;

                    // set video constraints
                    // NOTE(review): facingMode is always assigned above, so
                    // videoConstraints is never empty here and the `video: true`
                    // fallback branch appears unreachable — confirm.
                    if (Object.keys(videoConstraints).length > 0) {
                        mediaConstraints.video = videoConstraints;
                    } else {
                        mediaConstraints.video = true;
                    }
                }

                return mediaConstraints;
            }
        }, {
            // Read-only access to the SessionReport collected for this session.
            key: 'sessionReport',
            get: function get() {
                return this._sessionReport;
            }
        }, {
            // Read-only call id: the contactId passed to the constructor, or a
            // generated UUID when no contactId was supplied.
            key: 'callId',
            get: function get() {
                return this._callId;
            }
            /**
             * getMediaStream returns the local stream, which may be acquired from local device or from user provided stream.
             * Rather than getting a stream by calling getUserMedia (which gets a stream from local device such as camera),
             * user could also provide the stream to the RtcSession directly to connect to the other end.
             */

        }, {
            key: 'mediaStream',
            get: function get() {
                return this._localStream;
            },

            /**
             * Optional. RtcSession will grab input device if this is not specified.
             * Please note: this RtcSession class only support single audio track and/or single video track.
             */
            set: function set(input) {
                // NOTE(review): _userProvidedStream is flagged true even when
                // `input` is null/undefined — confirm callers never pass a falsy value.
                this._localStream = input;
                this._userProvidedStream = true;
            }
            /**
             * Needed, expect an audio element that can be used to play remote audio stream.
             */

        }, {
            // NOTE(review): the JSDoc block above looks displaced by transpilation;
            // it appears to describe `remoteAudioElement`, not this getter.
            // Read-only access to the remote video stream, if any.
            key: 'remoteVideoStream',
            get: function get() {
                return this._remoteVideoStream;
            }
        }, {
            // The setters below are write-only callback registrations: each simply
            // stores the handler for the session machinery to invoke later.
            // Each interleaved JSDoc block documents the key that FOLLOWS it.
            // Callback when gUM (getUserMedia) succeeds. First param is RtcSession.
            key: 'onGumSuccess',
            set: function set(handler) {
                this._onGumSuccess = handler;
            }
            /**
             * Callback when gUM fails.
             * First param is RtcSession object.
             * Second param is the error.
             */

        }, {
            key: 'onGumError',
            set: function set(handler) {
                this._onGumError = handler;
            }
            /**
             * Callback if failed initializing local resources
             * First param is RtcSession object.
             */

        }, {
            key: 'onSessionFailed',
            set: function set(handler) {
                this._onSessionFailed = handler;
            }
            /**
             * Callback after local user media stream is added to the session.
             * First param is RtcSession object.
             * Second param is media stream
             */

        }, {
            key: 'onLocalStreamAdded',
            set: function set(handler) {
                this._onLocalStreamAdded = handler;
            }
            /**
             * Callback when all local resources are ready. Establishing signaling chanel and ICE collection happens at the same time after this.
             * First param is RtcSession object.
             */

        }, {
            key: 'onSessionInitialized',
            set: function set(handler) {
                this._onSessionInitialized = handler;
            }
            /**
             * Callback when signaling channel is established.
             * RTC session will move forward only if onSignalingConnected and onIceCollectionComplete are both called.
             *
             * First param is RtcSession object.
             */

        }, {
            key: 'onSignalingConnected',
            set: function set(handler) {
                this._onSignalingConnected = handler;
            }
            /**
             * Callback when ICE collection completes either because there is no more candidate or collection timed out.
             * RTC session will move forward only if onSignalingConnected and onIceCollectionComplete are both called.
             *
             * First param is RtcSession object.
             * Second param is boolean, TRUE - ICE collection timed out.
             * Third param is number of candidates collected.
             */

        }, {
            key: 'onIceCollectionComplete',
            set: function set(handler) {
                this._onIceCollectionComplete = handler;
            }
            /**
             * Callback when signaling channel is established and ICE collection completed with at least one candidate.
             * First param is RtcSession object.
             */

        }, {
            key: 'onSignalingStarted',
            set: function set(handler) {
                this._onSignalingStarted = handler;
            }
            /**
             * Callback when the call is established (handshaked and media stream should be flowing)
             * First param is RtcSession object.
             */

        }, {
            key: 'onSessionConnected',
            set: function set(handler) {
                this._onSessionConnected = handler;
            }
            /**
             * Callback after remote media stream is added to the session.
             * This could be called multiple times with the same stream if multiple tracks are included in the same stream.
             *
             * First param is RtcSession object.
             * Second param is media stream track.
             */

        }, {
            key: 'onRemoteStreamAdded',
            set: function set(handler) {
                this._onRemoteStreamAdded = handler;
            }
            /**
             * Callback when the hangup is initiated (implies the call was successfully established).
             * First param is RtcSession object.
             */

        }, {
            key: 'onSessionCompleted',
            set: function set(handler) {
                this._onSessionCompleted = handler;
            }
            /**
             * Callback after session is cleaned up, no matter if the call was successfully established or not.
             * First param is RtcSession object.
             * Second param is SessionReport object.
             */

        }, {
            key: 'onSessionDestroyed',
            set: function set(handler) {
                this._onSessionDestroyed = handler;
            }
        }, {
            // Whether to request an audio track for the session (defaults to true
            // in the constructor).
            key: 'enableAudio',
            set: function set(flag) {
                this._enableAudio = flag;
            }
        }, {
            // Echo-cancellation preference; stored for the audio-constraint
            // builder (consumer not visible in this chunk — TODO confirm).
            key: 'echoCancellation',
            set: function set(flag) {
                this._echoCancellation = flag;
            }
        }, {
            // Whether to request a video track for the session.
            key: 'enableVideo',
            set: function set(flag) {
                this._enableVideo = flag;
            }
        }, {
            // Upper bound for the video frameRate constraint.
            key: 'maxVideoFrameRate',
            set: function set(frameRate) {
                this._maxVideoFrameRate = frameRate;
            }
        }, {
            // Lower bound for the video frameRate constraint.
            key: 'minVideoFrameRate',
            set: function set(frameRate) {
                this._minVideoFrameRate = frameRate;
            }
        }, {
            // Ideal value for the video frameRate constraint.
            key: 'videoFrameRate',
            set: function set(frameRate) {
                this._videoFrameRate = frameRate;
            }
        }, {
            // Upper bound for the video width constraint.
            key: 'maxVideoWidth',
            set: function set(width) {
                this._maxVideoWidth = width;
            }
        }, {
            // Lower bound for the video width constraint.
            key: 'minVideoWidth',
            set: function set(width) {
                this._minVideoWidth = width;
            }
        }, {
            // Ideal value for the video width constraint. Note: width/height are
            // only honored when at least one width AND one height bound are set.
            key: 'idealVideoWidth',
            set: function set(width) {
                this._idealVideoWidth = width;
            }
        }, {
            // Upper bound for the video height constraint.
            key: 'maxVideoHeight',
            set: function set(height) {
                this._maxVideoHeight = height;
            }
        }, {
            // Lower bound for the video height constraint.
            key: 'minVideoHeight',
            set: function set(height) {
                this._minVideoHeight = height;
            }
        }, {
            // Ideal value for the video height constraint.
            key: 'idealVideoHeight',
            set: function set(height) {
                this._idealVideoHeight = height;
            }
        }, {
            // Camera facing mode: 'user' or 'environment'; any other value is
            // coerced to 'user' when the constraints are built.
            key: 'facingMode',
            set: function set(mode) {
                this._facingMode = mode;
            }
        }, {
            // DOM audio element used to play the remote audio stream.
            key: 'remoteAudioElement',
            set: function set(element) {
                this._remoteAudioElement = element;
            }
        }, {
            // DOM video element used to render the remote video stream.
            key: 'remoteVideoElement',
            set: function set(element) {
                this._remoteVideoElement = element;
            }
            /**
             * Override the default signaling connect time out.
             * Value is in milliseconds.
             */

        }, {
            key: 'signalingConnectTimeout',
            set: function set(ms) {
                this._signalingConnectTimeout = ms;
            }
            /**
             * Override the default ICE collection time limit.
             * Value is in milliseconds (constructor default: DEFAULT_ICE_TIMEOUT_MS).
             */

        }, {
            key: 'iceTimeoutMillis',
            set: function set(timeoutMillis) {
                this._iceTimeoutMillis = timeoutMillis;
            }
            /**
             * Override the default GUM timeout time limit.
             * Value is in milliseconds (constructor default: DEFAULT_GUM_TIMEOUT_MS).
             */

        }, {
            key: 'gumTimeoutMillis',
            set: function set(timeoutMillis) {
                this._gumTimeoutMillis = timeoutMillis;
            }
            /**
             * connect-rtc-js initiate the handshaking with all browser supported codec by default, Amazon Connect service will choose the codec according to its preference setting.
             * Setting this attribute will force connect-rtc-js to only use specified codec.
             * WARNING: Setting this to unsupported codec will cause the failure of handshaking.
             * Supported audio codecs: opus.
             */

        }, {
            key: 'forceAudioCodec',
            set: function set(audioCodec) {
                this._forceAudioCodec = audioCodec;
            }

            /**
             * connect-rtc-js initiate the handshaking with all browser supported codec by default, Amazon Connect service will choose the codec according to its preference setting.
             * Setting this attribute will force connect-rtc-js to only use specified codec.
             * WARNING: Setting this to unsupported codec will cause the failure of handshaking.
             * Supported video codecs: VP8, VP9, H264.
             */

        }, {
            key: 'forceVideoCodec',
            set: function set(videoCodec) {
                this._forceVideoCodec = videoCodec;
            }

            /**
             * connect-rtc-js disables OPUS DTX by default because it harms audio quality.
             * @param flag boolean
             */

        }, {
            key: 'enableOpusDtx',
            set: function set(flag) {
                this._enableOpusDtx = flag;
            }
        }]);
        return RtcSession;
    }();