in src/common.speech/ServiceRecognizerBase.ts [391:484]
/**
 * Pumps messages from the service connection until the recognizer is
 * disposed, the turn/session ends, or the queue drains while no
 * recognition is in flight.
 *
 * Well-known paths (turn.start, speech.startdetected, speech.enddetected,
 * turn.end) are handled here; anything else is offered to
 * processTypeSpecificMessages and, if unhandled, raised as a ServiceEvent.
 *
 * Resolves when the pump stops; never rejects — read/dispatch failures are
 * deliberately swallowed (see catch below).
 */
protected async receiveMessage(): Promise<void> {
    try {
        if (this.privIsDisposed) {
            // We're done.
            return;
        }

        let connection = await this.fetchConnection();
        const message = await connection.read();

        // A derived class may replace the message pump wholesale.
        // NOTE(review): this check runs *after* connection.read(), so the
        // message just read is dropped when an override is installed —
        // presumably the override performs its own reads; confirm against
        // the overriding subclasses.
        if (this.receiveMessageOverride !== undefined) {
            return this.receiveMessageOverride();
        }

        // An empty read indicates we are draining the queue and it came with
        // no message; keep pumping only while a recognition is in flight.
        if (!message) {
            if (!this.privRequestSession.isRecognizing) {
                return;
            }
            return this.receiveMessage();
        }

        this.privServiceHasSentMessage = true;

        const connectionMessage = SpeechConnectionMessage.fromConnectionMessage(message);

        // Ignore messages that belong to a previous request (stale turn).
        if (connectionMessage.requestId.toLowerCase() === this.privRequestSession.requestId.toLowerCase()) {
            // Each case is block-scoped: the original declared `const`/`let`
            // bindings directly in cases, which leaks them across case labels
            // (eslint no-case-declarations).
            switch (connectionMessage.path.toLowerCase()) {
                case "turn.start": {
                    this.privMustReportEndOfStream = true;
                    this.privRequestSession.onServiceTurnStartResponse();
                    break;
                }

                case "speech.startdetected": {
                    const speechStartDetected: SpeechDetected = SpeechDetected.fromJSON(connectionMessage.textBody);
                    const speechStartEventArgs = new RecognitionEventArgs(speechStartDetected.Offset, this.privRequestSession.sessionId);
                    if (!!this.privRecognizer.speechStartDetected) {
                        this.privRecognizer.speechStartDetected(this.privRecognizer, speechStartEventArgs);
                    }
                    break;
                }

                case "speech.enddetected": {
                    let json: string;
                    if (connectionMessage.textBody.length > 0) {
                        json = connectionMessage.textBody;
                    } else {
                        // If the request was empty, the JSON returned is empty.
                        json = "{ Offset: 0 }";
                    }
                    const speechStopDetected: SpeechDetected = SpeechDetected.fromJSON(json);

                    // Only shrink the buffers for continuous recognition.
                    // For single shot, the speech.phrase message will come after the
                    // speech.end and it should own buffer shrink.
                    if (this.privRecognizerConfig.isContinuousRecognition) {
                        this.privRequestSession.onServiceRecognized(speechStopDetected.Offset + this.privRequestSession.currentTurnAudioOffset);
                    }

                    const speechStopEventArgs = new RecognitionEventArgs(speechStopDetected.Offset + this.privRequestSession.currentTurnAudioOffset, this.privRequestSession.sessionId);
                    if (!!this.privRecognizer.speechEndDetected) {
                        this.privRecognizer.speechEndDetected(this.privRecognizer, speechStopEventArgs);
                    }
                    break;
                }

                case "turn.end": {
                    // Order matters: flush telemetry, report end-of-stream (once),
                    // then let the session record the turn end before deciding
                    // whether the overall session is over.
                    await this.sendTelemetryData();
                    if (this.privRequestSession.isSpeechEnded && this.privMustReportEndOfStream) {
                        this.privMustReportEndOfStream = false;
                        await this.cancelRecognitionLocal(CancellationReason.EndOfStream, CancellationErrorCode.NoError, undefined);
                    }

                    const sessionStopEventArgs: SessionEventArgs = new SessionEventArgs(this.privRequestSession.sessionId);
                    await this.privRequestSession.onServiceTurnEndResponse(this.privRecognizerConfig.isContinuousRecognition);

                    if (!this.privRecognizerConfig.isContinuousRecognition || this.privRequestSession.isSpeechEnded || !this.privRequestSession.isRecognizing) {
                        // Session is over: notify and stop the pump.
                        if (!!this.privRecognizer.sessionStopped) {
                            this.privRecognizer.sessionStopped(this.privRecognizer, sessionStopEventArgs);
                        }
                        return;
                    }

                    // Continuous recognition: start the next turn on a (possibly
                    // re-established) connection and keep pumping.
                    connection = await this.fetchConnection();
                    await this.sendPrePayloadJSON(connection);
                    break;
                }

                default: {
                    if (!await this.processTypeSpecificMessages(connectionMessage)) {
                        // here are some messages that the derived class has not
                        // processed, dispatch them to connect class
                        if (!!this.privServiceEvents) {
                            this.serviceEvents.onEvent(new ServiceEvent(connectionMessage.path.toLowerCase(), connectionMessage.textBody));
                        }
                    }
                    break;
                }
            }
        }

        return this.receiveMessage();
    } catch (error) {
        // Best-effort by design: read/dispatch failures are surfaced to the
        // caller through the cancellation path, not through this promise.
        // (Was `return null`, which is not assignable to Promise<void> under
        // strictNullChecks.)
        return;
    }
}