...
To analyze the source code, take the display.js module version available here, which can be downloaded with build 1.0.1.36
Local video capturing and displaying
1. Initialization
initLocalDisplay() code
The initLocalDisplay() returns the object to work with HTML5 tags to capture and display local video and audio
...
2.1. Add audio track to HTML5 video tag
add() code
Where:
- audio track is added to video tag
- onended event handler is added to the audio track
- click event handler for the audio mute/unmute button is added
Code Block |
---|
|
if (stream.getAudioTracks().length > 0) {
    let videoElement = getAudioContainer();
    if (videoElement) {
        let track = stream.getAudioTracks()[0];
        videoElement.video.srcObject.addTrack(track);
        videoElement.audioStateDisplay.innerHTML = audioStateText(stream) + " " + type;
        videoElement.audioStateDisplay.addEventListener("click", function() {
            onMuteClick(videoElement.audioStateDisplay, stream, type);
        });
        track.addEventListener("ended", function() {
            videoElement.video.srcObject.removeTrack(track);
            videoElement.audioStateDisplay.innerHTML = "No audio";
            //check video element has no tracks left
            for (const [key, vTrack] of Object.entries(videoElement.video.srcObject.getTracks())) {
                if (vTrack.readyState !== "ended") {
                    return;
                }
            }
            removeLocalDisplay(videoElement.id);
        });
        return;
    }
} |
2.2. Container creation to display local video
add() code
Where:
- container
div
tag to display local video is created div
tag to display video information is created
Code Block |
---|
|
const coreDisplay = createContainer(null);
coreDisplay.id = stream.id;
const publisherNameDisplay = createInfoDisplay(coreDisplay, name + " " + type); |
2.3. Button creation to mute/unmute local audio
add() code
Where:
- button to mute/unmute local audio is created
Code Block |
---|
|
const audioStateDisplay = document.createElement("button");
coreDisplay.appendChild(audioStateDisplay); |
2.4. Tag creation to display local video
add() code
Where:
- container tag which can be resized to a parent node is created
- HTML5
video
tag is created (considering Safari publishing)
Code Block |
---|
|
const streamDisplay = createContainer(coreDisplay);
streamDisplay.id = "stream-" + id;
const video = document.createElement("video");
video.muted = true;
if(Browser().isSafariWebRTC()) {
video.setAttribute("playsinline", "");
video.setAttribute("webkit-playsinline", "");
}
streamDisplay.appendChild(video);
video.srcObject = stream; |
2.5. Video tag event handlers creation
add() code
Where:
- local video playback is started
onended
event handler is set up for video trackonresize
event handler is set up for local video to adjust video displaying size to the container dimensions
Code Block |
---|
|
video.onloadedmetadata = function (e) {
video.play();
};
stream.getTracks().forEach(function(track){
track.addEventListener("ended", function() {
video.srcObject.removeTrack(track);
//check video element has no tracks left
for (const [key, vTrack] of Object.entries(video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(id);
});
});
if (stream.getVideoTracks().length > 0) {
// Resize only if video displayed
video.addEventListener('resize', function (event) {
publisherNameDisplay.innerHTML = name + " " + type + " " + video.videoWidth + "x" + video.videoHeight;
resizeVideo(event.target);
});
} else {
// Hide audio only container
hideItem(streamDisplay);
// Set up mute button for audio only stream
audioStateDisplay.innerHTML = audioStateText(stream) + " " + type;
audioStateDisplay.addEventListener("click", function() {
onMuteClick(audioStateDisplay, stream, type);
});
} |
2.6. Video container addition to HTML page
add() code
Code Block |
---|
|
localDisplays[id] = coreDisplay;
localDisplayDiv.appendChild(coreDisplay);
return coreDisplay; |
3. Stop video and audio capturing
stop() code
Code Block |
---|
|
// Stops local capture: removes every local display element (which also
// releases the captured tracks via removeLocalDisplay).
const stop = function () {
for (const [key, value] of Object.entries(localDisplays)) {
removeLocalDisplay(value.id);
}
} |
Room streams published displaying
1. Initialization
initRemoteDisplay() code
The initRemoteDisplay() function returns the object to work with HTML5 tags to display remote video and audio streams
Code Block |
---|
|
const initRemoteDisplay = function(options) {
const constants = SFU.constants;
const remoteParticipants = {};
// Validate options first
if (!options.div) {
throw new Error("Main div to place all the media tag is not defined");
}
if (!options.room) {
throw new Error("Room is not defined");
}
if (!options.peerConnection) {
throw new Error("PeerConnection is not defined");
}
let mainDiv = options.div;
let room = options.room;
let peerConnection = options.peerConnection;
let displayOptions = options.displayOptions || {publisher: true, quality: true, type: true};
...
const createRemoteDisplay = function(id, name, mainDiv, displayOptions) {
...
}
const stop = function() {
...
}
peerConnection.ontrack = ({transceiver}) => {
...
}
return {
stop: stop
}
} |
2. Room events handling
2.1. ADD_TRACKS
initRemoteDisplay() code
Where:
- a new participant is added to participants list
- tracks quality information is added to tracks list
- elements to display remote audio and video are created
Code Block |
---|
|
room.on(constants.SFU_ROOM_EVENT.ADD_TRACKS, function(e) {
console.log("Received ADD_TRACKS");
let participant = remoteParticipants[e.info.nickName];
if (!participant) {
participant = {};
participant.nickName = e.info.nickName;
participant.tracks = [];
participant.displays = [];
remoteParticipants[participant.nickName] = participant;
}
participant.tracks.push.apply(participant.tracks, e.info.info);
for (const pTrack of e.info.info) {
let createDisplay = true;
for (let i = 0; i < participant.displays.length; i++) {
let display = participant.displays[i];
if (pTrack.type === "VIDEO") {
if (display.hasVideo()) {
continue;
}
display.videoMid = pTrack.mid;
display.setTrackInfo(pTrack);
createDisplay = false;
break;
} else if (pTrack.type === "AUDIO") {
if (display.hasAudio()) {
continue;
}
display.audioMid = pTrack.mid;
display.setTrackInfo(pTrack);
createDisplay = false;
break;
}
}
if (!createDisplay) {
continue;
}
let display = createRemoteDisplay(participant.nickName, participant.nickName, mainDiv, displayOptions);
participant.displays.push(display);
if (pTrack.type === "VIDEO") {
display.videoMid = pTrack.mid;
display.setTrackInfo(pTrack);
} else if (pTrack.type === "AUDIO") {
display.audioMid = pTrack.mid;
display.setTrackInfo(pTrack);
}
}
...
}); |
2.2. REMOVE_TRACKS
initRemoteDisplay() code
Where:
- video elements are removed
- tracks data are deleted from tracks list
Code Block |
---|
|
room.on(constants.SFU_ROOM_EVENT.ADD_TRACKS, function(e) {
...
}).on(constants.SFU_ROOM_EVENT.REMOVE_TRACKS, function(e) {
console.log("Received REMOVE_TRACKS");
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
}
for (const rTrack of e.info.info) {
for (let i = 0; i < participant.tracks.length; i++) {
if (rTrack.mid === participant.tracks[i].mid) {
participant.tracks.splice(i, 1);
break;
}
}
for (let i = 0; i < participant.displays.length; i++) {
let found = false;
const display = participant.displays[i];
if (display.audioMid === rTrack.mid) {
display.setAudio(null);
found = true;
} else if (display.videoMid === rTrack.mid) {
display.setVideo(null);
found = true;
}
if (found) {
if (!display.hasAudio() && !display.hasVideo()) {
display.dispose();
participant.displays.splice(i, 1);
}
break;
}
}
}
...
}); |
2.3. LEFT
initRemoteDisplay() code
Where:
- participant is removed from participants list
- video elements are removed
Code Block |
---|
|
room.on(constants.SFU_ROOM_EVENT.ADD_TRACKS, function(e) {
...
}).on(constants.SFU_ROOM_EVENT.LEFT, function(e) {
console.log("Received LEFT");
let participant = remoteParticipants[e.name];
if (!participant) {
return;
}
participant.displays.forEach(function(display){
display.dispose();
})
delete remoteParticipants[e.name];
...
}); |
2.4. TRACK_QUALITY_STATE
initRemoteDisplay() code
Where:
- track quality data are updated
Code Block |
---|
|
room.on(constants.SFU_ROOM_EVENT.ADD_TRACKS, function(e) {
...
}).on(constants.SFU_ROOM_EVENT.TRACK_QUALITY_STATE, function(e){
console.log("Received track quality state");
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
}
for (const rTrack of e.info.tracks) {
const mid = rTrack.mid;
for (let i = 0; i < participant.displays.length; i++) {
const display = participant.displays[i];
if (display.videoMid === mid) {
display.updateQualityInfo(rTrack.quality);
break;
            }
        }
    }
}); |
2.3. Button creation to mute/unmute local audio
...
3. Tags to display remote video creation
3.1. Container div creation
createRemoteDisplay() code
Where:
...
- display parameters are set up
- container
div
tag for participants streams is created - child container
div
tag for a certain stream is created - container
div
tag for quality switch buttons is created
Code Block |
---|
|
const cell = document.createElement("div");
cell.setAttribute("class", "text-center");
cell.id = id;
mainDiv.appendChild(cell);
let publisherNameDisplay;
let currentQualityDisplay;
let videoTypeDisplay;
let abrQualityCheckPeriod = ABR_QUALITY_CHECK_PERIOD;
let abrKeepOnGoodQuality = ABR_KEEP_ON_QUALITY;
let abrTryForUpperQuality = ABR_TRY_UPPER_QUALITY; |
2.4. Tag creation to display local video
add() code
Where:
- container tag which can be resized to a parent node is created
- HTML5
video
tag is created (considering Safari publishing)
Code Block |
---|
|
if (displayOptions.abrQualityCheckPeriod !== undefined) {
    abrQualityCheckPeriod = displayOptions.abrQualityCheckPeriod;
}
if (displayOptions.abrKeepOnGoodQuality !== undefined) {
    abrKeepOnGoodQuality = displayOptions.abrKeepOnGoodQuality;
}
if (displayOptions.abrTryForUpperQuality !== undefined) {
    abrTryForUpperQuality = displayOptions.abrTryForUpperQuality;
}
if (!displayOptions.abr) {
    abrQualityCheckPeriod = 0;
    abrKeepOnGoodQuality = 0;
    abrTryForUpperQuality = 0;
} |
2.5. Video tag event handlers creation
add() code
Where:
- local video playback is started
onended
event handler is set up for video trackonresize
event handler is set up for local video to adjust video displaying size to the container dimensions
Code Block |
---|
|
if (displayOptions.publisher) {
    publisherNameDisplay = createInfoDisplay(cell, "Published by: " + name);
}
if (displayOptions.quality) {
    currentQualityDisplay = createInfoDisplay(cell, "");
}
if (displayOptions.type) {
    videoTypeDisplay = createInfoDisplay(cell, "");
}
const qualitySwitchDisplay = createInfoDisplay(cell, "");
let qualityDivs = [];
let contentType = "";
const rootDisplay = createContainer(cell);
const streamDisplay = createContainer(rootDisplay);
const audioDisplay = createContainer(rootDisplay);
const audioTypeDisplay = createInfoDisplay(audioDisplay);
const audioTrackDisplay = createContainer(audioDisplay);
const audioStateButton = AudioStateButton();
hideItem(streamDisplay);
hideItem(audioDisplay);
hideItem(publisherNameDisplay);
hideItem(currentQualityDisplay);
hideItem(videoTypeDisplay);
hideItem(qualitySwitchDisplay); |
3.2. ABR initialization
createRemoteDisplay() code
Stream playback quality parameters are set to switch automatically to appropriate quality
Code Block |
---|
|
const abr = ABR(abrQualityCheckPeriod, [
    {parameter: "nackCount", maxLeap: 10},
    {parameter: "freezeCount", maxLeap: 10},
    {parameter: "packetsLost", maxLeap: 10}
], abrKeepOnGoodQuality, abrTryForUpperQuality); |
3.3.
...
Video tag addition
setVideo() code
Code Block |
---|
|
setVideo: function(stream) {
    if (video) {
        video.remove();
    }
    if (stream == null) {
        video = null;
        this.videoMid = undefined;
        qualityDivs.forEach(function(div) {
            div.remove();
        });
        qualityDivs = [];
        return;
    }
    showItem(streamDisplay);
    video = document.createElement("video");
    video.controls = "controls";
    video.muted = true;
    video.autoplay = true;
    if (Browser().isSafariWebRTC()) {
        video.setAttribute("playsinline", "");
        video.setAttribute("webkit-playsinline", "");
        this.setWebkitEventHandlers(video);
    } else {
        this.setEventHandlers(video);
    }
    streamDisplay.appendChild(video);
    video.srcObject = stream;
    this.setResizeHandler(video);
    abr.start();
}, |
2.2. REMOVE_TRACKS
initRemoteDisplay() code
Where:
...
3.4. Audio tag addition
setAudio() code
Code Block |
---|
language | js |
---|
theme | RDark | RDark |
---|
|
setAudio: function(stream) {
    if (audio) {
        audio.remove();
    }
    if (!stream) {
        audio = null;
        this.audioMid = undefined;
        return;
    }
    showItem(audioDisplay);
    audio = document.createElement("audio");
    audio.controls = "controls";
    audio.muted = true;
    audio.autoplay = true;
    if (Browser().isSafariWebRTC()) {
        audio.setAttribute("playsinline", "");
        audio.setAttribute("webkit-playsinline", "");
        this.setWebkitEventHandlers(audio);
    } else {
        this.setEventHandlers(audio);
    }
    audioTrackDisplay.appendChild(audio);
    audioStateButton.makeButton(audioTypeDisplay, audio);
    audio.srcObject = stream;
    audio.onloadedmetadata = function (e) {
        audio.play().then(function() {
            if (Browser().isSafariWebRTC() && Browser().isiOS()) {
                console.warn("Audio track should be manually unmuted in iOS Safari");
            } else {
                audio.muted = false;
                audioStateButton.setButtonState();
            }
        });
    };
}, |
3.5. Audio and video tags event handlers set up
setResizeHandler(), setEventHandlers(), setWebkitEventHandlers() code
Code Block |
---|
|
setEventHandlers: function(video) {
    // Ignore play/pause button
    video.addEventListener("pause", function () {
        console.log("Media paused by click, continue...");
        video.play();
    });
},
setWebkitEventHandlers: function(video) {
    let needRestart = false;
    let isFullscreen = false;
    // Use webkitbeginfullscreen event to detect full screen mode in iOS Safari
    video.addEventListener("webkitbeginfullscreen", function () {
        isFullscreen = true;
    });
    video.addEventListener("pause", function () {
        if (needRestart) {
            console.log("Media paused after fullscreen, continue...");
            video.play();
            needRestart = false;
        } else {
            console.log("Media paused by click, continue...");
            video.play();
        }
    });
    video.addEventListener("webkitendfullscreen", function () {
        video.play();
        needRestart = true;
        isFullscreen = false;
    });
}, |
3.6. Adding track info to ABR
setVideoABRTrack() code
Code Block |
---|
|
setVideoABRTrack: function(track) {
    abr.setTrack(track);
}, |
3.7. Setting up quality switcher
setTrackInfo() code
Where:
- quality switch buttons are set up
- tracks published info is added to ABR
- tags to display a current playback quality and audio/video source are shown or hidden
Code Block |
---|
|
setTrackInfo: function(trackInfo) {
    if (trackInfo) {
        if (trackInfo.quality) {
            showItem(qualitySwitchDisplay);
            if (abr.isEnabled()) {
                const autoDiv = createQualityButton("Auto", qualityDivs, qualitySwitchDisplay);
                autoDiv.style.color = QUALITY_COLORS.SELECTED;
                autoDiv.addEventListener('click', function() {
                    setQualityButtonsColor(qualityDivs);
                    autoDiv.style.color = QUALITY_COLORS.SELECTED;
                    abr.setAuto();
                });
            }
            for (let i = 0; i < trackInfo.quality.length; i++) {
                abr.addQuality(trackInfo.quality[i]);
                const qualityDiv = createQualityButton(trackInfo.quality[i], qualityDivs, qualitySwitchDisplay);
                qualityDiv.addEventListener('click', function() {
                    console.log("Clicked on quality " + trackInfo.quality[i] + " trackId " + trackInfo.id);
                    if (qualityDiv.style.color === QUALITY_COLORS.UNAVAILABLE) {
                        return;
                    }
                    setQualityButtonsColor(qualityDivs);
                    qualityDiv.style.color = QUALITY_COLORS.SELECTED;
                    abr.setManual();
                    abr.setQuality(trackInfo.quality[i]);
                });
            }
        } else {
            hideItem(qualitySwitchDisplay);
        }
        if (trackInfo.type) {
            contentType = trackInfo.contentType || "";
            if (trackInfo.type == "VIDEO" && displayOptions.type && contentType !== "") {
                showItem(videoTypeDisplay);
                videoTypeDisplay.innerHTML = contentType;
            }
            if (trackInfo.type == "AUDIO") {
                audioStateButton.setContentType(contentType);
            }
        }
    }
}, |
3.8. Adding available tracks quality info to ABR
updateQualityInfo() code
Code Block |
---|
|
updateQualityInfo: function(videoQuality) {
    showItem(qualitySwitchDisplay);
    for (const qualityInfo of videoQuality) {
        let qualityColor = QUALITY_COLORS.UNAVAILABLE;
        if (qualityInfo.available === true) {
            qualityColor = QUALITY_COLORS.AVAILABLE;
        }
        for (const qualityDiv of qualityDivs) {
            if (qualityDiv.innerText === qualityInfo.quality) {
                qualityDiv.style.color = qualityColor;
                break;
            }
        }
        abr.setQualityAvailable(qualityInfo.quality, qualityInfo.available);
    }
}, |
3.9. Removing the container
dispose() code
Code Block |
---|
|
dispose: function() {
abr.stop();
cell.remove();
}, |
4. Subscription to ontrack PeerConnection event
PeerConnection.ontrack(), setAudio(), setVideo(), setVideoABRTrack() code
Where:
- video or audio tag addition function is called when track is received
Code Block |
---|
|
// Attaches an incoming remote track to its display: the owning participant is
// found by matching the transceiver's mid against the mids stored from
// ADD_TRACKS, then the track is wrapped in a new MediaStream and handed to the
// matching display via setVideo()/setAudio().
peerConnection.ontrack = ({transceiver}) => {
let rParticipant;
console.log("Attach remote track " + transceiver.receiver.track.id + " kind " + transceiver.receiver.track.kind + " mid " + transceiver.mid);
// Find the participant whose track list contains this mid
for (const [nickName, participant] of Object.entries(remoteParticipants)) {
for (const pTrack of participant.tracks) {
console.log("Participant " + participant.nickName + " track " + pTrack.id + " mid " + pTrack.mid);
if (pTrack.mid === transceiver.mid) {
rParticipant = participant;
break;
}
}
if (rParticipant) {
break;
}
}
if (rParticipant) {
// Route the track to the display whose videoMid/audioMid matches
for (const display of rParticipant.displays) {
if (transceiver.receiver.track.kind === "video") {
if (display.videoMid === transceiver.mid) {
let stream = new MediaStream();
stream.addTrack(transceiver.receiver.track);
display.setVideoABRTrack(transceiver.receiver.track);
display.setVideo(stream);
break;
}
} else if (transceiver.receiver.track.kind === "audio") {
if (display.audioMid === transceiver.mid) {
let stream = new MediaStream();
stream.addTrack(transceiver.receiver.track);
display.setAudio(stream);
break;
}
}
}
} else {
console.warn("Failed to find participant for track " + transceiver.receiver.track.id);
}
} |
5. Playback stopping
stop() code
Code Block |
---|
|
// Stops remote playback: disposes every participant's display elements and
// clears the participants registry.
const stop = function() {
for (const [nickName, participant] of Object.entries(remoteParticipants)) {
participant.displays.forEach(function(display){
display.dispose();
});
delete remoteParticipants[nickName];
}
} |