protected async sendAudio()

in src/common.speech/ServiceRecognizerBase.ts [617:698]

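Reads audio chunk-by-chunk from the supplied stream node and uploads it over the service connection: the first few seconds go out unthrottled so recognition can start quickly, after which transmission is paced to roughly twice real time.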

    protected async sendAudio(audioStreamNode: IAudioStreamNode): Promise<void> {
        const audioFormat: AudioStreamFormatImpl = await this.audioSource.format;

        // The earliest time at which we should send the next chunk of audio to the service.
        let nextSendTime: number = Date.now();

        // Maximum amount of audio to send before we start to throttle, configured in
        // milliseconds and converted to a byte budget below.
        const fastLaneSizeMs: string = this.privRecognizerConfig.parameters.getProperty("SPEECH-TransmitLengthBeforThrottleMs", "5000");
        const maxSendUnthrottledBytes: number = audioFormat.avgBytesPerSec / 1000 * parseInt(fastLaneSizeMs, 10);
        const startRecogNumber: number = this.privRequestSession.recogNumber;

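        // Read one chunk from the audio stream and upload it, then schedule the next
        // cycle until the stream ends or the recognition turn is torn down.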
        const readAndUploadCycle = async (): Promise<void> => {
            // Keep reading and uploading only while the recognizer is alive, speech has
            // not ended, and this is still the same recognition turn.
            if (!this.privIsDisposed &&
                !this.privRequestSession.isSpeechEnded &&
                this.privRequestSession.isRecognizing &&
                this.privRequestSession.recogNumber === startRecogNumber) {

                const connection: IConnection = await this.fetchConnection();
                const audioStreamChunk: IStreamChunk<ArrayBuffer> = await audioStreamNode.read();
                // The read may have completed after the service already recognized the
                // end of speech; if so, don't send any more audio.
                if (this.privRequestSession.isSpeechEnded) {
                    return;
                }

                let payload: ArrayBuffer;
                let sendDelay: number;

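                // A missing or end-of-stream chunk means there is no more audio to read;
                // send immediately, with a null payload to signal the end of the stream.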
                if (!audioStreamChunk || audioStreamChunk.isEnd) {
                    payload = null;
                    sendDelay = 0;
                } else {
                    payload = audioStreamChunk.buffer;

                    this.privRequestSession.onAudioSent(payload.byteLength);

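                    // "Fast lane": the first few seconds of audio go out unthrottled so
                    // recognition can start quickly; after that, chunks are paced against
                    // nextSendTime.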
                    if (maxSendUnthrottledBytes >= this.privRequestSession.bytesSent) {
                        sendDelay = 0;
                    } else {
                        sendDelay = Math.max(0, nextSendTime - Date.now());
                    }
                }

                if (0 !== sendDelay) {
                    await this.delay(sendDelay);
                }

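                // The chunk covers payload.byteLength / avgBytesPerSec seconds of audio;
                // allow the next send after half that interval (roughly 2x real time).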
                if (payload !== null) {
                    nextSendTime = Date.now() + (payload.byteLength * 1000 / (audioFormat.avgBytesPerSec * 2));
                }

                // Re-check that the turn is still active after the read and any throttling delay.
                if (!this.privIsDisposed &&
                    !this.privRequestSession.isSpeechEnded &&
                    this.privRequestSession.isRecognizing &&
                    this.privRequestSession.recogNumber === startRecogNumber) {
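                    // Send the chunk; a null payload results in an empty audio message,
                    // which marks the end of the audio stream. If the send fails, fall
                    // back to handling it as the end of the service turn.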
                    connection.send(
                        new SpeechConnectionMessage(MessageType.Binary, "audio", this.privRequestSession.requestId, null, payload)
                    ).catch(() => {
                        this.privRequestSession.onServiceTurnEndResponse(this.privRecognizerConfig.isContinuousRecognition).catch(() => { });
                    });

                    if (!audioStreamChunk?.isEnd) {
                        // Regardless of success or failure, schedule the next upload.
                        // If the underlying connection was broken, the next cycle will
                        // get a new connection and re-transmit missing audio automatically.
                        return readAndUploadCycle();
                    } else {
                        // The audio stream has been closed, so there is no need to schedule
                        // another read-upload cycle. For non-live audio sources, mark speech
                        // as ended now that the stream is exhausted.
                        if (!this.privIsLiveAudio) {
                            this.privRequestSession.onSpeechEnded();
                        }
                    }
                }
            }
        };

        return readAndUploadCycle();
    }
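
For a concrete sense of the pacing, here is a minimal standalone sketch (not part of the SDK) that reproduces the fast-lane and throttling arithmetic above. It assumes 16 kHz, 16-bit mono PCM (avgBytesPerSec = 32000) and the default 5000 ms fast-lane window; the chunk size and the scheduleChunk helper are illustrative only.

    // Standalone sketch of the pacing math used by sendAudio(); values are illustrative.
    const avgBytesPerSec = 32000;          // 16 kHz * 2 bytes * 1 channel
    const fastLaneSizeMs = 5000;           // default "SPEECH-TransmitLengthBeforThrottleMs"
    const maxSendUnthrottledBytes = avgBytesPerSec / 1000 * fastLaneSizeMs; // 160000 bytes

    let bytesSent = 0;
    let nextSendTime = Date.now();

    function scheduleChunk(chunkByteLength: number): number {
        // Mirror the real code: count the chunk before checking the fast-lane budget.
        bytesSent += chunkByteLength;

        // Delay before this chunk may be sent: zero inside the fast lane,
        // otherwise wait until nextSendTime.
        const sendDelay = maxSendUnthrottledBytes >= bytesSent
            ? 0
            : Math.max(0, nextSendTime - Date.now());

        // The chunk covers (chunkByteLength / avgBytesPerSec) seconds of audio;
        // allow the next send after half of that, i.e. upload at roughly 2x real time.
        nextSendTime = Date.now() + (chunkByteLength * 1000 / (avgBytesPerSec * 2));

        return sendDelay;
    }

    // A 3200-byte chunk is 100 ms of audio; once past the fast-lane budget the next
    // chunk is held back so it goes out no sooner than ~50 ms later.
    console.log(scheduleChunk(3200));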