// Copyright Epic Games, Inc. All Rights Reserved.
function webRtcPlayer ( parOptions ) {
// Default-construct the options object so the lookups below are always safe.
parOptions = typeof parOptions !== 'undefined' ? parOptions : {};
var self = this;
const urlParams = new URLSearchParams(window.location.search);

//**********************
//Config setup
//**********************
this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
this.cfg.sdpSemantics = 'unified-plan';
// Chrome 89+ emits SDP that is incompatible with UE Pixel Streaming 4.26 and below unless
// `offerExtmapAllowMixed` is disabled. 4.27+ Pixel Streaming supports `offerExtmapAllowMixed`,
// so this line is only required while older Pixel Streaming versions must be supported.
this.cfg.offerExtmapAllowMixed = false;

// ?ForceTURN restricts ICE to relay candidates only (useful for testing a TURN server).
this.forceTURN = urlParams.has('ForceTURN');
if (this.forceTURN) {
    console.log("Forcing TURN usage by setting ICE Transport Policy in peer connection config.");
    this.cfg.iceTransportPolicy = "relay";
}

// ?ForceMaxBundle bundles all media onto a single underlying transport.
this.cfg.bundlePolicy = "balanced";
this.forceMaxBundle = urlParams.has('ForceMaxBundle');
if (this.forceMaxBundle) {
    this.cfg.bundlePolicy = "max-bundle";
}
//**********************
//Variables
//**********************
this.pcClient = null;
this.dcClient = null;
this.tnClient = null;
// True when the remote peer is an SFU rather than a direct P2P streamer.
this.sfu = false;

// SDP constraints applied when creating an offer.
this.sdpConstraints = {
    offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
    offerToReceiveVideo: 1,
    voiceActivityDetection: false
};

// See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
this.dataChannelOptions = { ordered: true };

// This is useful if the video/audio needs to autoplay (without user input) as browsers do not allow autoplay non-muted of sound sources without user interaction.
this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
// To force mono playback of WebRTC audio
this.forceMonoAudio = urlParams.has('ForceMonoAudio');
if (this.forceMonoAudio) {
    console.log("Will attempt to force mono audio by munging the sdp in the browser.");
}

// To enable mic in browser use SSL/localhost and have ?useMic in the query string.
this.useMic = urlParams.has('useMic');
if (!this.useMic) {
    console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
}

// When ?useMic check for SSL or localhost
let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
let isHttpsConnection = location.protocol === 'https:';
if (this.useMic && !isLocalhostConnection && !isHttpsConnection) {
    this.useMic = false;
    console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
    console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
}

// Prefer SFU or P2P connection
this.preferSFU = urlParams.has('preferSFU');
console.log(this.preferSFU ?
    "The browser will signal it would prefer an SFU connection. Remove ?preferSFU from the url to signal for P2P usage." :
    "The browser will signal for a P2P connection. Pass ?preferSFU in the url to signal for SFU usage.");
// Latency tester: collects timestamps measured in the browser and in UE so an
// end-to-end latency figure can be derived. All times are milliseconds.
// NOTE: the original used comma operators between assignments (a leftover of a
// bad merge); these are proper semicolon-terminated statements now.
this.latencyTestTimings =
{
    TestStartTimeMs: null,
    UEReceiptTimeMs: null,
    UEEncodeMs: null,
    UECaptureToSendMs: null,
    UETransmissionTimeMs: null,
    BrowserReceiptTimeMs: null,
    FrameDisplayDeltaTimeMs: null,

    // Clear all recorded timings, ready for a new test run.
    Reset: function () {
        this.TestStartTimeMs = null;
        this.UEReceiptTimeMs = null;
        this.UEEncodeMs = null;
        this.UECaptureToSendMs = null;
        this.UETransmissionTimeMs = null;
        this.BrowserReceiptTimeMs = null;
        this.FrameDisplayDeltaTimeMs = null;
    },

    // Merge the timings reported by UE, stamp the browser receipt time, and notify.
    SetUETimings: function (UETimings) {
        this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
        this.UEEncodeMs = UETimings.EncodeMs;
        this.UECaptureToSendMs = UETimings.CaptureToSendMs;
        this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
        this.BrowserReceiptTimeMs = Date.now();
        this.OnAllLatencyTimingsReady(this);
    },

    // Record the frame receive->composite delta, only once per test run.
    SetFrameDisplayDeltaTime: function (DeltaTimeMs) {
        if (this.FrameDisplayDeltaTimeMs == null) {
            this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
            this.OnAllLatencyTimingsReady(this);
        }
    },

    // Overridden by consumers that want a callback once all timings are complete.
    OnAllLatencyTimingsReady: function (Timings) { }
}
//**********************
//Functions
//**********************
// Create the <video> element the remote stream is rendered into and expose it.
this.createWebRtcVideo = function () {
    const video = document.createElement('video');
    video.id = "streamingVideo";
    video.playsInline = true;
    video.disablePictureInPicture = true;
    video.muted = self.startVideoMuted;
    video.addEventListener('loadedmetadata', function (e) {
        if (self.onVideoInitialised) {
            self.onVideoInitialised();
        }
    }, true);

    // Check if request video frame callback is supported
    if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
        // The API is supported!
        const onVideoFrameReady = (now, metadata) => {
            if (metadata.receiveTime && metadata.expectedDisplayTime) {
                const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
                self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
            }
            // Re-register the callback to be notified about the next frame.
            video.requestVideoFrameCallback(onVideoFrameReady);
        };
        // Initially register the callback to be notified about the first frame.
        video.requestVideoFrameCallback(onVideoFrameReady);
    }

    return video;
}
// Create the <audio> element used when audio arrives on a separate stream to the video.
this.createWebRtcAudio = function () {
    const audio = document.createElement('audio');
    audio.id = 'streamingAudio';
    return audio;
}
// Media elements the remote streams are attached to, plus a registry of every
// video stream the remote peer has offered (keyed by stream id).
this.video = this.createWebRtcVideo();
this.audio = this.createWebRtcAudio();
this.availableVideoStreams = new Map();
// Connection-state logging callbacks. Declared with `const` so they are scoped to this
// player instance instead of leaking onto the global object: the original implicit-global
// assignments meant a second webRtcPlayer instance clobbered the first one's handlers
// (and `onsignalingstatechange` etc. landed on `window`).
const onsignalingstatechange = function (state) {
    console.info('Signaling state change. |', state.srcElement.signalingState, "|");
};

const oniceconnectionstatechange = function (state) {
    console.info('Browser ICE connection |', state.srcElement.iceConnectionState, '|');
};

const onicegatheringstatechange = function (state) {
    console.info('Browser ICE gathering |', state.srcElement.iceGatheringState, '|');
};
// Fires when the peer connection receives a remote track. Audio tracks are routed to
// handleOnAudioTrack; video tracks are registered in availableVideoStreams and the
// first stream is attached to the video element.
const handleOnTrack = function (e) {
    if (e.track) {
        console.log('Got track. | Kind=' + e.track.kind + ' | Id=' + e.track.id + ' | readyState=' + e.track.readyState + ' |');
    }

    if (e.track.kind == "audio") {
        handleOnAudioTrack(e.streams[0]);
        return;
    }
    // BUGFIX: this was `else ( e.track.kind == "video" ) { ... }`, which parses as an
    // expression statement followed by an unconditional block - i.e. the "video" branch
    // ran for every non-audio track kind. It must be `else if`.
    else if (e.track.kind == "video") {
        for (const s of e.streams) {
            if (!self.availableVideoStreams.has(s.id)) {
                self.availableVideoStreams.set(s.id, s);
            }
        }

        self.video.srcObject = e.streams[0];

        // All tracks are added "muted" by WebRTC/browser and become unmuted when media is being sent
        e.track.onunmute = () => {
            self.video.srcObject = e.streams[0];
            self.onNewVideoTrack(e.streams);
        };
    }
};
// Attach an audio-only MediaStream to the dedicated <audio> element, but only when the
// video element is bound to a *different* stream. (When audio and video share one stream
// the video element plays both, so nothing needs doing.) Declared `const` rather than
// the original implicit global so two player instances cannot clobber each other.
const handleOnAudioTrack = function (audioMediaStream) {
    // do nothing: the video has the same media stream as the audio track we have here (they are linked)
    if (self.video.srcObject == audioMediaStream) {
        return;
    }
    // video element has some other media stream that is not associated with this audio track
    else if (self.video.srcObject && self.video.srcObject !== audioMediaStream) {
        self.audio.srcObject = audioMediaStream;
    }
};
// Invoked when the remote peer creates the data channel (we are the "receiving" peer).
// `const` instead of the original implicit global.
const onDataChannel = function (dataChannelEvent) {
    // This is the primary data channel code path when we are "receiving"
    console.log("Data channel created for us by browser as we are a receiving peer.");
    self.dcClient = dataChannelEvent.channel;
    setupDataChannelCallbacks(self.dcClient);
};

// Create a data channel on `pc` (we are the "offering" peer) and wire up its callbacks.
const createDataChannel = function (pc, label, options) {
    // This is the primary data channel code path when we are "offering"
    let datachannel = pc.createDataChannel(label, options);
    // Restored the template literal that had been space-garbled in extraction.
    console.log(`Created datachannel (${label})`);
    setupDataChannelCallbacks(datachannel);
    return datachannel;
};
// Configure a data channel for binary messaging and hook up logging/message callbacks.
// Returns the channel, or null if configuration threw. `const` instead of implicit global;
// the close/error log strings regained their missing closing parenthesis.
const setupDataChannelCallbacks = function (datachannel) {
    try {
        // Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
        datachannel.binaryType = "arraybuffer";

        datachannel.addEventListener('open', e => {
            console.log(`Data channel connected: ${datachannel.label} (${datachannel.id})`);
            if (self.onDataChannelConnected) {
                self.onDataChannelConnected();
            }
        });

        datachannel.addEventListener('close', e => {
            console.log(`Data channel disconnected: ${datachannel.label} (${datachannel.id})`, e);
        });

        datachannel.addEventListener('message', e => {
            if (self.onDataChannelMessage) {
                self.onDataChannelMessage(e.data);
            }
        });

        datachannel.addEventListener('error', e => {
            console.error(`Data channel error: ${datachannel.label} (${datachannel.id})`, e);
        });

        return datachannel;
    } catch (e) {
        console.warn('Datachannel setup caused an exception: ', e);
        return null;
    }
};
// Fires when the browser gathers a local ICE candidate; forward it to the signalling
// server. A null candidate (end-of-candidates sentinel) or an empty candidate string is
// not forwarded. `const` instead of the original implicit global.
const onicecandidate = function (e) {
    let candidate = e.candidate;
    if (candidate && candidate.candidate) {
        console.log("%c[Browser ICE candidate]", "background: violet; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
        self.onWebRtcCandidate(candidate);
    }
};
// Create an SDP offer on `pc`, munge it for our Opus settings, set it as the local
// description and hand it to the onWebRtcOffer callback so it can be sent to the
// signalling server. `const` instead of the original implicit global.
const handleCreateOffer = function (pc) {
    pc.createOffer(self.sdpConstraints).then(function (offer) {
        // Munging is where we modifying the sdp string to set parameters that are not exposed to the browser's WebRTC API
        mungeSDP(offer);
        // Set our munged SDP on the local peer connection so it is "set" and will be send across
        pc.setLocalDescription(offer);
        if (self.onWebRtcOffer) {
            self.onWebRtcOffer(offer);
        }
    },
    function () { console.warn("Couldn't create offer") });
};
// Modify ("munge") the offer/answer SDP to set Opus audio parameters that are not
// exposed through the browser WebRTC API. The existing 'useinbandfec=1' fmtp token is
// used as the splice point for the extra parameters. `const` instead of implicit global.
const mungeSDP = function (offer) {
    let audioSDP = '';
    // set max bitrate to highest bitrate Opus supports
    audioSDP += 'maxaveragebitrate=510000;';
    if (self.useMic) {
        // set the max capture rate to 48khz (so we can send high quality audio from mic)
        audioSDP += 'sprop-maxcapturerate=48000;';
    }
    // Force mono or stereo based on whether ?ForceMonoAudio was passed or not
    audioSDP += self.forceMonoAudio ? 'sprop-stereo=0;stereo=0;' : 'sprop-stereo=1;stereo=1;';
    // enable in-band forward error correction for opus audio
    audioSDP += 'useinbandfec=1';
    // We use the line 'useinbandfec=1' (which Opus uses) to set our Opus specific audio parameters.
    offer.sdp = offer.sdp.replace('useinbandfec=1', audioSDP);
};
// Wire all of our handlers onto a freshly created RTCPeerConnection.
// `const` instead of the original implicit global.
const setupPeerConnection = function (pc) {
    //Setup peerConnection events
    pc.onsignalingstatechange = onsignalingstatechange;
    pc.oniceconnectionstatechange = oniceconnectionstatechange;
    pc.onicegatheringstatechange = onicegatheringstatechange;
    pc.ontrack = handleOnTrack;
    pc.onicecandidate = onicecandidate;
    pc.ondatachannel = onDataChannel;
};
// Build a closure that folds a raw RTCStatsReport into self.aggregatedStats, computing
// derived values (bitrate, framerate, running min/max, averages, RTT, codec names) from
// the delta against the previously stored snapshot. `const` instead of implicit global;
// ASI-reliant lines now carry explicit semicolons.
const generateAggregatedStatsFunction = function () {
    if (!self.aggregatedStats)
        self.aggregatedStats = {};

    return function (stats) {
        let newStat = {};
        // store each type of codec we can get stats on
        newStat.codecs = {};

        stats.forEach(stat => {
            // Get the inbound-rtp for video
            if (stat.type === 'inbound-rtp'
                && !stat.isRemote
                && (stat.mediaType === 'video' || stat.id.toLowerCase().includes('video'))) {

                newStat.timestamp = stat.timestamp;
                newStat.bytesReceived = stat.bytesReceived;
                newStat.framesDecoded = stat.framesDecoded;
                newStat.packetsLost = stat.packetsLost;
                // "Start" values are captured on the first sample and carried forward.
                newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
                newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
                newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;

                // Derived values need a previous sample to diff against.
                if (self.aggregatedStats && self.aggregatedStats.timestamp) {
                    // Get the mimetype of the video codec being used
                    if (stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId)) {
                        newStat.videoCodec = self.aggregatedStats.codecs[stat.codecId];
                    }

                    if (self.aggregatedStats.bytesReceived) {
                        // bitrate = bits received since last time / number of ms since last time
                        //This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other)
                        newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
                        newStat.bitrate = Math.floor(newStat.bitrate);
                        newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate;
                        newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate;
                    }

                    if (self.aggregatedStats.bytesReceivedStart) {
                        newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
                        newStat.avgBitrate = Math.floor(newStat.avgBitrate);
                    }

                    if (self.aggregatedStats.framesDecoded) {
                        // framerate = frames decoded since last time / number of seconds since last time
                        newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
                        newStat.framerate = Math.floor(newStat.framerate);
                        newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate;
                        newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate;
                    }

                    if (self.aggregatedStats.framesDecodedStart) {
                        newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
                        newStat.avgframerate = Math.floor(newStat.avgframerate);
                    }
                }
            }

            // Get inbound-rtp for audio
            if (stat.type === 'inbound-rtp'
                && !stat.isRemote
                && (stat.mediaType === 'audio' || stat.id.toLowerCase().includes('audio'))) {
                // Get audio bytes received
                if (stat.bytesReceived) {
                    newStat.audioBytesReceived = stat.bytesReceived;
                }
                // As we loop back through we may wish to compute some stats based on a delta of the previous time we recorded the stat
                if (self.aggregatedStats && self.aggregatedStats.timestamp) {
                    // Get the mimetype of the audio codec being used
                    if (stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId)) {
                        newStat.audioCodec = self.aggregatedStats.codecs[stat.codecId];
                    }
                    // Determine audio bitrate delta over the time period
                    if (self.aggregatedStats.audioBytesReceived) {
                        newStat.audioBitrate = 8 * (newStat.audioBytesReceived - self.aggregatedStats.audioBytesReceived) / (stat.timestamp - self.aggregatedStats.timestamp);
                        newStat.audioBitrate = Math.floor(newStat.audioBitrate);
                    }
                }
            }

            //Read video track stats
            if (stat.type === 'track' && (stat.trackIdentifier === 'video_label' || stat.kind === 'video')) {
                newStat.framesDropped = stat.framesDropped;
                newStat.framesReceived = stat.framesReceived;
                newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
                newStat.frameHeight = stat.frameHeight;
                newStat.frameWidth = stat.frameWidth;
                newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
                newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
            }

            // Round-trip time from the active candidate pair (0 is treated as "not measured").
            if (stat.type === 'candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0) {
                newStat.currentRoundTripTime = stat.currentRoundTripTime;
            }

            // Store mimetype of each codec
            if (newStat.hasOwnProperty('codecs') && stat.type === 'codec' && stat.mimeType && stat.id) {
                const codecId = stat.id;
                const codecType = stat.mimeType.replace("video/", "").replace("audio/", "");
                newStat.codecs[codecId] = codecType;
            }
        });

        // Carry across the latest receive->composite delta recorded by the video frame callback.
        if (self.aggregatedStats.receiveToCompositeMs) {
            newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
            self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
        }

        self.aggregatedStats = newStat;

        if (self.onAggregatedStats)
            self.onAggregatedStats(newStat);
    };
};
// Configure transceivers ahead of offer/answer creation:
//  - always a recvonly video transceiver for UE video;
//  - audio is recvonly unless the microphone is enabled, in which case we capture the
//    mic and either reuse an existing audio transceiver (renegotiation case) or add a
//    new sendrecv one. `const` instead of the original implicit global.
const setupTransceiversAsync = async function (pc) {
    let hasTransceivers = pc.getTransceivers().length > 0;

    // Setup a transceiver for getting UE video
    pc.addTransceiver("video", { direction: "recvonly" });

    // Setup a transceiver for sending mic audio to UE and receiving audio from UE
    if (!self.useMic) {
        pc.addTransceiver("audio", { direction: "recvonly" });
    }
    else {
        let audioSendOptions = self.useMic ?
        {
            autoGainControl: false,
            channelCount: 1,
            echoCancellation: false,
            latency: 0,
            noiseSuppression: false,
            sampleRate: 48000,
            sampleSize: 16,
            volume: 1.0
        } : false;

        // Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
        const stream = await navigator.mediaDevices.getUserMedia({ video: false, audio: audioSendOptions });
        if (stream) {
            if (hasTransceivers) {
                // Reuse the existing audio transceiver and start sending on it too.
                for (let transceiver of pc.getTransceivers()) {
                    if (transceiver && transceiver.receiver && transceiver.receiver.track && transceiver.receiver.track.kind === "audio") {
                        for (const track of stream.getTracks()) {
                            if (track.kind && track.kind == "audio") {
                                transceiver.sender.replaceTrack(track);
                                transceiver.direction = "sendrecv";
                            }
                        }
                    }
                }
            }
            else {
                for (const track of stream.getTracks()) {
                    if (track.kind && track.kind == "audio") {
                        pc.addTransceiver(track, { direction: "sendrecv" });
                    }
                }
            }
        }
        else {
            // Mic capture failed; fall back to receive-only audio.
            pc.addTransceiver("audio", { direction: "recvonly" });
        }
    }
};
//**********************
//Public functions
//**********************
// Enable or disable every track on the current video stream.
this.setVideoEnabled = function (enabled) {
    self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
}

// Begin a latency test run; the start time is reported through onTestStarted.
this.startLatencyTest = function (onTestStarted) {
    // Can't start latency test without a video element
    if (!self.video) {
        return;
    }
    self.latencyTestTimings.Reset();
    self.latencyTestTimings.TestStartTimeMs = Date.now();
    onTestStarted(self.latencyTestTimings.TestStartTimeMs);
}
// This is called when receiving new ice candidates individually instead of as part of the offer.
this.handleCandidateFromServer = function (iceCandidate) {
    let candidate = new RTCIceCandidate(iceCandidate);
    console.log("%c[Unreal ICE candidate]", "background: pink; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");

    // if forcing TURN, reject any candidates not relay
    if (self.forceTURN) {
        // check if no relay address is found, if so, we are assuming it means no TURN server
        if (candidate.candidate.indexOf("relay") < 0) {
            console.warn("Dropping candidate because it was not TURN relay.", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|")
            return;
        }
    }

    self.pcClient.addIceCandidate(candidate).catch(function (e) {
        console.error("Failed to add ICE candidate", e);
    });
};
// Called externally to create an offer for the server.
this.createOffer = function () {
    // Tear down any previous connection before starting a fresh negotiation.
    if (self.pcClient) {
        console.log("Closing existing PeerConnection")
        self.pcClient.close();
        self.pcClient = null;
    }
    self.pcClient = new RTCPeerConnection(self.cfg);
    setupPeerConnection(self.pcClient);
    setupTransceiversAsync(self.pcClient).finally(function () {
        self.dcClient = createDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
        handleCreateOffer(self.pcClient);
    });
};
// Called externally when an offer is received from the server.
this.receiveOffer = function (offer) {
    // An SFU marks its offers; remember that and strip the flag before handing the offer
    // to the browser. Uses `self` (not `this`) so the flag lands on the player instance
    // even if this function is invoked detached from it, consistent with the rest of the file.
    if (offer.sfu) {
        self.sfu = true;
        delete offer.sfu;
    }

    if (!self.pcClient) {
        console.log("Creating a new PeerConnection in the browser.")
        self.pcClient = new RTCPeerConnection(self.cfg);
        setupPeerConnection(self.pcClient);

        // Put things here that happen post transceiver setup
        self.pcClient.setRemoteDescription(offer)
            .then(() => {
                setupTransceiversAsync(self.pcClient).finally(function () {
                    self.pcClient.createAnswer()
                        .then(answer => {
                            mungeSDP(answer);
                            return self.pcClient.setLocalDescription(answer);
                        })
                        .then(() => {
                            if (self.onWebRtcAnswer) {
                                self.onWebRtcAnswer(self.pcClient.currentLocalDescription);
                            }
                        })
                        .then(() => {
                            // Request zero playout delay on all receivers to minimise latency.
                            let receivers = self.pcClient.getReceivers();
                            for (let receiver of receivers) {
                                receiver.playoutDelayHint = 0;
                            }
                        })
                        .catch((error) => console.error("createAnswer() failed:", error));
                });
            });
    }
};
// Called externally when an answer is received from the server.
this.receiveAnswer = function (answer) {
    self.pcClient.setRemoteDescription(answer);
};
// Called when the SFU requests that we open negotiated data channels with fixed stream
// ids. When the send and recv ids differ we open two unidirectional channels; otherwise
// a single bidirectional channel is shared.
this.receiveSFUPeerDataChannelRequest = function (channelData) {
    const sendOptions = {
        ordered: true,
        negotiated: true,
        id: channelData.sendStreamId
    };
    const unidirectional = channelData.sendStreamId != channelData.recvStreamId;

    const sendDataChannel = self.pcClient.createDataChannel(unidirectional ? 'send-datachannel' : 'datachannel', sendOptions);
    setupDataChannelCallbacks(sendDataChannel);

    if (unidirectional) {
        const recvOptions = {
            ordered: true,
            negotiated: true,
            id: channelData.recvStreamId
        };
        const recvDataChannel = self.pcClient.createDataChannel('recv-datachannel', recvOptions);
        // when recv data channel is "open" we want to let SFU know so it can tell streamer
        recvDataChannel.addEventListener('open', e => {
            if (self.onSFURecvDataChannelReady) {
                self.onSFURecvDataChannelReady();
            }
        });
        setupDataChannelCallbacks(recvDataChannel);
    }

    // Use `self` (not `this`) so the channel is recorded on the player instance even if
    // this function is invoked detached from it, consistent with the rest of the file.
    self.dcClient = sendDataChannel;
}
// Tear down the peer connection and stop the stats polling interval.
this.close = function () {
    if (self.pcClient) {
        console.log("Closing existing peerClient")
        self.pcClient.close();
        self.pcClient = null;
    }
    if (self.aggregateStatsIntervalId) {
        clearInterval(self.aggregateStatsIntervalId);
    }
}
//Sends data across the datachannel
this.send = function (data) {
    if (self.dcClient && self.dcClient.readyState == 'open') {
        //console.log('Sending data on dataconnection', self.dcClient)
        self.dcClient.send(data);
    }
};

// Fetch raw WebRTC stats and pass them to the supplied callback.
this.getStats = function (onStats) {
    if (self.pcClient && onStats) {
        self.pcClient.getStats(null).then((stats) => {
            onStats(stats);
        });
    }
}
// Poll getStats every `checkInterval` ms and fold each report into self.aggregatedStats.
this.aggregateStats = function (checkInterval) {
    const calcAggregatedStats = generateAggregatedStatsFunction();
    const pollStats = () => { self.getStats(calcAggregatedStats); };
    self.aggregateStatsIntervalId = setInterval(pollStats, checkInterval);
}
}