in gst/gst-kvs-plugin/src/KvsWebRtc.c [663:752]
STATUS createWebRtcStreamingSession(PGstKvsPlugin pGstKvsPlugin, PCHAR peerId, BOOL isMaster, PWebRtcStreamingSession* ppStreamingSession)
{
    STATUS retStatus = STATUS_SUCCESS;
    RtcMediaStreamTrack videoTrack, audioTrack;
    PWebRtcStreamingSession pStreamingSession = NULL;

    MEMSET(&videoTrack, 0x00, SIZEOF(RtcMediaStreamTrack));
    MEMSET(&audioTrack, 0x00, SIZEOF(RtcMediaStreamTrack));

    CHK(pGstKvsPlugin != NULL && ppStreamingSession != NULL, STATUS_NULL_ARG);
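    // When acting as master, the remote peer id must be supplied by the caller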
    CHK((isMaster && peerId != NULL) || !isMaster, STATUS_INVALID_ARG);

    pStreamingSession = (PWebRtcStreamingSession) MEMCALLOC(1, SIZEOF(WebRtcStreamingSession));
    CHK(pStreamingSession != NULL, STATUS_NOT_ENOUGH_MEMORY);
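
    // Master sessions use the remote peer's id; viewer sessions fall back to the default client id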
    if (isMaster) {
        STRCPY(pStreamingSession->peerId, peerId);
    } else {
        STRCPY(pStreamingSession->peerId, DEFAULT_VIEWER_CLIENT_ID);
    }
    ATOMIC_STORE_BOOL(&pStreamingSession->peerIdReceived, TRUE);

    pStreamingSession->pGstKvsPlugin = pGstKvsPlugin;
    pStreamingSession->rtcMetricsHistory.prevTs = GETTIME();

    // if we're the viewer, we control the trickle ice mode
    pStreamingSession->remoteCanTrickleIce = !isMaster && pGstKvsPlugin->gstParams.trickleIce;

    ATOMIC_STORE_BOOL(&pStreamingSession->terminateFlag, FALSE);
    ATOMIC_STORE_BOOL(&pStreamingSession->candidateGatheringDone, FALSE);
    ATOMIC_STORE_BOOL(&pStreamingSession->connected, FALSE);
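
    // Create the peer connection and register the ICE candidate and connection state change handlers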
    CHK_STATUS(initializePeerConnection(pGstKvsPlugin, &pStreamingSession->pPeerConnection));
    CHK_STATUS(peerConnectionOnIceCandidate(pStreamingSession->pPeerConnection, (UINT64) pStreamingSession, onIceCandidateHandler));
    CHK_STATUS(peerConnectionOnConnectionStateChange(pStreamingSession->pPeerConnection, (UINT64) pStreamingSession, onConnectionStateChange));
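
    // Forward remote data channel events to the plugin's callback, if one was configured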
    if (pGstKvsPlugin->onDataChannel != NULL) {
        CHK_STATUS(peerConnectionOnDataChannel(pStreamingSession->pPeerConnection, (UINT64) pStreamingSession, pGstKvsPlugin->onDataChannel));
    }

    // Declare that we support H264 (Profile=42E01F, level-asymmetry-allowed=1, packetization-mode=1), VP8, Opus, mu-law and A-law
    CHK_STATUS(addSupportedCodec(pStreamingSession->pPeerConnection, RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE));
    CHK_STATUS(addSupportedCodec(pStreamingSession->pPeerConnection, RTC_CODEC_VP8));
    CHK_STATUS(addSupportedCodec(pStreamingSession->pPeerConnection, RTC_CODEC_OPUS));
    CHK_STATUS(addSupportedCodec(pStreamingSession->pPeerConnection, RTC_CODEC_MULAW));
    CHK_STATUS(addSupportedCodec(pStreamingSession->pPeerConnection, RTC_CODEC_ALAW));

    // Add a SendRecv Transceiver of type video
    videoTrack.kind = MEDIA_STREAM_TRACK_KIND_VIDEO;
    videoTrack.codec = RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE;
    STRCPY(videoTrack.streamId, "myKvsVideoStream");
    STRCPY(videoTrack.trackId, "myVideoTrack");
    CHK_STATUS(addTransceiver(pStreamingSession->pPeerConnection, &videoTrack, NULL, &pStreamingSession->pVideoRtcRtpTransceiver));
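    // Receive bandwidth estimation updates for the video transceiver via the sample handler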
    CHK_STATUS(
        transceiverOnBandwidthEstimation(pStreamingSession->pVideoRtcRtpTransceiver, (UINT64) pStreamingSession, sampleBandwidthEstimationHandler));

    // Set up the audio transceiver codec id according to the type of encoding used
    if (STRNCMP(pGstKvsPlugin->gstParams.audioContentType, AUDIO_MULAW_CONTENT_TYPE, MAX_GSTREAMER_MEDIA_TYPE_LEN) == 0) {
        audioTrack.codec = RTC_CODEC_MULAW;
    } else if (STRNCMP(pGstKvsPlugin->gstParams.audioContentType, AUDIO_ALAW_CONTENT_TYPE, MAX_GSTREAMER_MEDIA_TYPE_LEN) == 0) {
        audioTrack.codec = RTC_CODEC_ALAW;
    } else if (STRNCMP(pGstKvsPlugin->gstParams.audioContentType, AUDIO_OPUS_CONTENT_TYPE, MAX_GSTREAMER_MEDIA_TYPE_LEN) == 0) {
        audioTrack.codec = RTC_CODEC_OPUS;
    } else {
        DLOGE("Error, audio content type %s not accepted by plugin", pGstKvsPlugin->gstParams.audioContentType);
        CHK(FALSE, STATUS_INVALID_ARG);
    }

    // Add a SendRecv Transceiver of type audio
    audioTrack.kind = MEDIA_STREAM_TRACK_KIND_AUDIO;
    STRCPY(audioTrack.streamId, "myKvsVideoStream");
    STRCPY(audioTrack.trackId, "myAudioTrack");
    CHK_STATUS(addTransceiver(pStreamingSession->pPeerConnection, &audioTrack, NULL, &pStreamingSession->pAudioRtcRtpTransceiver));
    CHK_STATUS(
        transceiverOnBandwidthEstimation(pStreamingSession->pAudioRtcRtpTransceiver, (UINT64) pStreamingSession, sampleBandwidthEstimationHandler));
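
    // Initialize first-frame tracking state; startup latency is presumably recorded when the first frame goes out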
    pStreamingSession->firstFrame = TRUE;
    pStreamingSession->startUpLatency = 0;

CleanUp:
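
    // On failure, free the partially constructed session so the caller never sees a half-initialized object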
    if (STATUS_FAILED(retStatus) && pStreamingSession != NULL) {
        freeWebRtcStreamingSession(&pStreamingSession);
        pStreamingSession = NULL;
    }

    if (ppStreamingSession != NULL) {
        *ppStreamingSession = pStreamingSession;
    }

    return retStatus;
}