This module contains code related to displaying local and remote media. The code is separated into two closures — one for local media and another one for remote media.
Local media
Display local media
1. Wrapper function
initLocalDisplay() code
Define top level closure for local media code
Code Block |
---|
|
const initLocalDisplay = function(localDisplayElement){ |
2. Local variables
code
Define local variables
Code Block |
---|
|
const localDisplayDiv = localDisplayElement;
const localDisplays = {}; |
3. Remove local display
removeLocalDisplay() code
Define function that will remove local display once track is ended
Code Block |
---|
|
// Drop a local display by id: forget it, detach its DOM node and
// rebuild the local layout grid.
const removeLocalDisplay = function(displayId) {
    delete localDisplays[displayId];
    $("#" + displayId).remove();
    reassembleLocalLayout();
}
4. Find video element without audio track
getAudioContainer() code
Define function that will find video element without audio track.
Code Block |
---|
|
// Find a local display whose <video> element has a stream with no audio
// track attached yet, so a fresh audio track can be added to it.
// Returns {id, video, audioStateDisplay} or undefined when none qualifies.
const getAudioContainer = function() {
    for (const value of Object.values(localDisplays)) {
        let video = value.getElementsByTagName("video");
        // Guard srcObject: a video element may exist before a stream has
        // been attached, and calling getAudioTracks() on null would throw.
        if (video && video[0] && video[0].srcObject) {
            let audioStateButton = value.getElementsByTagName("button");
            let audioTracks = video[0].srcObject.getAudioTracks();
            if (!audioTracks || audioTracks.length === 0) {
                return {
                    id: value.id,
                    video: video[0],
                    audioStateDisplay: audioStateButton[0]
                }
            }
        }
    }
};
...
5. Add track to local display
add() code
Define add function which will add new track to local display
...
Check if new track is an audio track. If so try to find video element without audio track attached. In case video element was found attach new audio track to it and return.
code
Code Block |
---|
|
if (stream.getAudioTracks().length > 0) {
let videoElement = getAudioContainer();
if (videoElement) {
let track = stream.getAudioTracks()[0];
videoElement.video.srcObject.addTrack(track);
videoElement.audioStateDisplay.innerHTML = "Audio state: " + stream.getAudioTracks()[0].enabled;
track.addEventListener("ended", function() {
videoElement.video.srcObject.removeTrack(track);
videoElement.audioStateDisplay.innerHTML = "Audio state: " + false;
//check video element has no tracks left
for (const [key, vTrack] of Object.entries(videoElement.video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(videoElement.id);
});
return;
}
} |
Create new display container
code
Code Block |
---|
|
const coreDisplay = document.createElement('div');
coreDisplay.setAttribute("style","width:200px; height:auto; border: solid; border-width: 1px");
coreDisplay.id = stream.id; |
Create and add name container to display container
code
Code Block |
---|
|
const streamNameDisplay = document.createElement("div");
streamNameDisplay.innerHTML = "Name: " + name;
streamNameDisplay.setAttribute("style","width:auto; height:30px");
coreDisplay.appendChild(streamNameDisplay); |
Create and add audio state display. Subscribe to "click" event to handle mute/unmute.
code
Code Block |
---|
|
const audioStateDisplay = document.createElement("button");
audioStateDisplay.setAttribute("style","width:auto; height:30px");
audioStateDisplay.innerHTML = "Audio state: " + (stream.getAudioTracks().length > 0 ? stream.getAudioTracks()[0].enabled : false);
audioStateDisplay.addEventListener('click', function(){
if (stream.getAudioTracks().length > 0) {
stream.getAudioTracks()[0].enabled = !(stream.getAudioTracks()[0].enabled);
audioStateDisplay.innerHTML = "Audio state: " + stream.getAudioTracks()[0].enabled;
}
});
coreDisplay.appendChild(audioStateDisplay); |
Create stream display which will hold video element.
code
Code Block |
---|
|
const streamDisplay = document.createElement('div');
streamDisplay.id = id;
streamDisplay.setAttribute("style","width:auto; height:auto");
coreDisplay.appendChild(streamDisplay); |
Create video element and add it to stream display.
code
Code Block |
---|
|
const video = document.createElement("video");
streamDisplay.appendChild(video);
video.srcObject = stream;
video.muted = true;
video.onloadedmetadata = function (e) {
video.play();
}; |
Subscribe to track's "ended" event. Once track is ended check if video element has any active tracks. If not - remove display.
code
Code Block |
---|
|
stream.getTracks().forEach(function(track){
track.addEventListener("ended", function() {
video.srcObject.removeTrack(track);
//check video element has no tracks left
for (const [key, vTrack] of Object.entries(video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(id);
});
}); |
Subscribe to resize event to keep video inside the container.
code
Code Block |
---|
|
video.addEventListener('resize', function (event) {
streamNameDisplay.innerHTML = "Name: " + name + " " + video.videoWidth + "x" + video.videoHeight;
resizeVideo(event.target);
}); |
Save display, reassemble local displays and return newly created one.
code
Code Block |
---|
|
localDisplays[id] = coreDisplay;
reassembleLocalLayout();
return coreDisplay; |
6. Refresh local display grid
reassembleLocalLayout() code
Define helper which will recalculate local displays grid and reassemble local displays.
Code Block |
---|
|
// Recalculate the local display grid and re-attach every local display
// into a freshly built row/column structure inside localDisplayDiv.
const reassembleLocalLayout = function() {
    const gridWidth = gridSize(Object.keys(localDisplays).length).x;
    const container = document.createElement('div');
    let row;
    let colI = 0;
    for (const [key, value] of Object.entries(localDisplays)) {
        // Start a new row when none exists yet or the current one is full.
        // (The original also tracked a row counter that was never read;
        // it has been removed.)
        if (!row || colI >= gridWidth) {
            row = createRow(container);
            colI = 0;
        }
        // Detach the display from its old position before re-inserting it.
        $("#" + key).detach();
        const col = createCol(row);
        col.appendChild(value);
        colI++;
    }
    $(localDisplayDiv).empty();
    localDisplayDiv.appendChild(container);
}
7. Export add function for main code
code.
Code Block |
---|
|
return {
add: add
} |
Remote media
Display remote media
1. Wrapper function
initRemoteDisplay() code
Define top level closure for remote media code
Code Block |
---|
|
const initRemoteDisplay = function(room, mainDiv, peerConnection) { |
2. Local variables
code
Define local variables
Code Block |
---|
|
const constants = SFU.constants;
const remoteParticipants = {}; |
3. Subscribe to
...
room events
code
...
Subscribe to related room events
Code Block |
---|
|
room.on(constants.SFU_ROOM_EVENT.ADD_TRACKS, function(e) {
let participant = remoteParticipants[e.info.nickName];
if (!participant) {
participant = {};
participant.nickName = e.info.nickName;
participant.tracks = [];
participant.displays = [];
remoteParticipants[participant.nickName] = participant;
}
participant.tracks.push.apply(participant.tracks, e.info.info);
for (const pTrack of e.info.info) {
if (pTrack.type === "VIDEO") {
let createDisplay = true;
for const(let displayObji = 0; i < participant.displays.length; i++) {
let display: createRemoteDisplay(participant.nickName, = participant.nickName, null, pTrack),
displays[i];
mediaStream: new MediaStream(),if (pTrack.type === "VIDEO") {
mids: {
if (display.hasVideo()) {
audio: [],
continue;
video: undefined
}
},
display.videoMid = pTrack.mid;
audioStreams: {},
audioElements: {} display.setTrackInfo(pTrack);
};
createDisplay = false;
const video = displayObj.display.getElementsByTagName("video")[0];
video.srcObject = displayObj.mediaStreambreak;
displayObj.mids.video = pTrack.mid;
} else if (pTrack.type === "AUDIO") {
participant.displays.push(displayObj);
}
if }
}).on(constants.SFU_ROOM_EVENT.REMOVE_TRACKS, function(e(display.hasAudio()) {
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
returncontinue;
}
for (const rTrack of e.info.info) {
}
for (let i = 0; i < participant.tracks.length; i++) {
display.audioMid = if (rTrackpTrack.mid === participant.tracks[i].mid) {
;
participant.tracks.splice(i, 1);
createDisplay = false;
break;
}
}
}
for (let i = 0; i < participant.displays.length; i++ if (!createDisplay) {
let found = falsecontinue;
const}
display = participant.displays[i];
let display = if createRemoteDisplay(display.mids.audio.includes(rTrack.mid)) {participant.nickName, participant.nickName, mainDiv);
//remove from mids arrayparticipant.displays.push(display);
if (pTrack.type display.mids.audio.splice(display.mids.audio.indexOf(rTrack.mid), 1);=== "VIDEO") {
//stop track and remove streamdisplay.videoMid = pTrack.mid;
display.audioStreams[rTrack.mid].getAudioTracks()[0].stop()setTrackInfo(pTrack);
} else if (pTrack.type delete display.audioStreams[rTrack.mid];=== "AUDIO") {
//remove audio elementdisplay.audioMid = pTrack.mid;
}
display.display.removeChild(display.audioElements[rTrack.mid]);}
}).on(constants.SFU_ROOM_EVENT.REMOVE_TRACKS, function(e) {
const participant delete= display.audioElements[rTrack.midremoteParticipants[e.info.nickName];
if (!participant) {
found = truereturn;
}
} else iffor (display.mids.video === rTrack.midconst rTrack of e.info.info) {
for (let display.mids.video i = undefined0;
i < participant.tracks.length; i++) {
display.mediaStream.getVideoTracks()[0].stop();
if (rTrack.mid === participant.tracks[i].mid) {
found = true;
}participant.tracks.splice(i, 1);
if (display.mids.audio.length === 0 && display.mids.video === undefined) {break;
const video = display.display.getElementsByTagName("video")[0]}
}
video.pause();
for (let i = 0; i < video.srcObject = null;participant.displays.length; i++) {
display.display.remove();
let found = false;
const display = participant.displays.splice(i, 1)[i];
}
if (display.audioMid === rTrack.mid) {
if (found) {
breakdisplay.setAudio(null);
}
found = }true;
}
}).on(constants.SFU_ROOM_EVENT.LEFT, function(e) {
let participant = remoteParticipants[e.name];
} else if (!participantdisplay.videoMid === rTrack.mid) {
return;
}
delete remoteParticipants[e.name];
}).on(constants.SFU_ROOM_EVENT.TRACK_QUALITY_STATE, function(e){
consoledisplay.log("Received track quality state"setVideo(null);
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
found = returntrue;
}
for (const rTrack of e.info.tracks) { }
const mid = rTrack.mid;
if (found) {
let vDisplay;
for (let i = 0; i <if participant.displays.length; i++(!display.hasAudio() && !display.hasVideo()) {
const display = participant.displays[i];
display.dispose();
if (display.mids.video === mid) {
vDisplay = display participant.displays.splice(i, 1);
break;
}
}
}
break;
//todo rework loops
if (vDisplay) {}
for}
(const qualityInfo of rTrack.quality) {
}
for (const child of vDisplay.display.childNodes}).on(constants.SFU_ROOM_EVENT.LEFT, function(e) {
let participant = remoteParticipants[e.name];
if (child.childNodes.length > 0!participant) {
return;
}
for (const cChild of child.childNodes) participant.displays.forEach(function(display){
display.dispose();
})
if (cChild.innerHTML === qualityInfo.quality) {delete remoteParticipants[e.name];
}).on(constants.SFU_ROOM_EVENT.TRACK_QUALITY_STATE, function(e){
console.log("Received track quality state");
const participant if (qualityInfo.available === true) {
= remoteParticipants[e.info.nickName];
if (!participant) {
return;
}
for (const rTrack of cChilde.style.color = "gray";
info.tracks) {
const mid = rTrack.mid;
for (let i = 0; i } else< participant.displays.length; i++) {
const display = participant.displays[i];
if cChild.style.color(display.videoMid = "red";== mid) {
display.updateQualityInfo(rTrack.quality);
}
break;
}
break;
}
}
}); |
SFU_ROOM_EVENT.ADD_TRACKS
Find participant. If not found create a new one.
code
Code Block |
---|
|
let participant = remoteParticipants[e.info.nickName];
if (!participant) {
participant = {};
participant.nickName = e.info.nickName;
participant.tracks = [];
participant.displays = [];
remoteParticipants[participant.nickName] = }
participant;
} |
Add new tracks to the participant.
code
Code Block |
---|
|
participant.tracks.push.apply(participant.tracks, e.info.info); |
Create display for every video track that is being added.
code
Code Block |
---|
|
for (const pTrack of e.info.info) {
}
let createDisplay = true;
}
for (let i = 0; i }
< participant.displays.length; i++) {
}
}
}); |
SFU_ROOM_EVENT.ADD_TRACKS
Find participant. If not found create a new one.
Code Block |
---|
|
let participant = remoteParticipants[e.info.nickName];
if (!participant) {
let display = participant.displays[i];
participant = {};
if participant(pTrack.nickNametype === e.info.nickName; "VIDEO") {
participant.tracks = [];
participant.displays = [];
remoteParticipants[participant.nickName] = participant;
} |
Add new tracks to the participant.
Code Block |
---|
|
participant.tracks.push.apply(participant.tracks, e.info.info); |
Create display for every video track that is being added.
Code Block |
---|
|
for (const pTrack of e.info.info) {
if (pTrack.type === "VIDEO") {
if (display.hasVideo()) {
const displayObj = {continue;
display: createRemoteDisplay(participant.nickName, participant.nickName, null, pTrack),
}
mediaStream: new MediaStream(),
display.videoMid = pTrack.mid;
mids: {
display.setTrackInfo(pTrack);
audio: [],
createDisplay video:= undefinedfalse;
},
break;
audioStreams: {},
} else if (pTrack.type audioElements:=== "AUDIO") {}
};
const video =if displayObj.(display.getElementsByTagNamehasAudio("video")[0];
)) {
video.srcObject = displayObj.mediaStream;
displayObj.mids.video = pTrack.mid;
continue;
participant.displays.push(displayObj);
}
} |
SFU_ROOM_EVENT.REMOVE_TRACKS
Find remote participant. If not found return.
Code Block |
---|
|
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
} |
Walk through tracks
Code Block |
---|
|
for (const rTrack of e.info.info) { |
Find and remove participant's track that has the same mid as the track that is being removed.
Code Block |
---|
|
for (let i = 0; i < participant.tracks.length; i++) {
if (rTrack.mid === participant.tracks[i].mid) {
display.audioMid = pTrack.mid;
createDisplay = false;
participant.tracks.splice(i, 1);
break;
}
} |
Find display that corresponds to track and remove track from the display. If display has no active tracks remove display as well.
Code Block |
---|
|
for (let i = 0; i < participant.displays.length; i++) {
let found = false;
const display = participant.displays[i] }
if (!createDisplay) {
continue;
if (display.mids.audio.includes(rTrack.mid)) {
}
//remove from mids array
let display.mids.audio.splice(display.mids.audio.indexOf(rTrack.mid), 1 = createRemoteDisplay(participant.nickName, participant.nickName, mainDiv);
//stop track and remove stream
participant.displays.push(display);
display.audioStreams[rTrack.mid].getAudioTracks()[0].stop();
if (pTrack.type delete display.audioStreams[rTrack.mid];=== "VIDEO") {
//remove audio element
display.videoMid display.display.removeChild(display.audioElements[rTrack= pTrack.mid]);
delete display.audioElements[rTrack.mid]setTrackInfo(pTrack);
found = true;
} else if (displaypTrack.mids.videotype === rTrack.mid"AUDIO") {
display.mids.video = undefined;
display.audioMid = display.mediaStream.getVideoTracks()[0].stop();
pTrack.mid;
found = true;}
}
if (display.mids.audio.length === 0 && display.mids.video === undefined} |
SFU_ROOM_EVENT.REMOVE_TRACKS
Find remote participant. If not found return.
code
Code Block |
---|
|
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
const video = display.display.getElementsByTagName("video")[0]
video.pause();
video.srcObject = null;return;
} |
Walk through tracks
code
Code Block |
---|
|
for (const rTrack of e.info.info) { |
Find and remove participant's track that has the same mid as track that is being removed
code
Code Block |
---|
|
for (let i = 0; i < participant.tracks.length; i++) {
if (rTrack.mid === participant.tracks[i].mid) {
displayparticipant.displaytracks.remove();
participant.displays.splice(isplice(i, 1);
}
if (found) {
break;
}
} |
...
Find the display that corresponds to the track and remove the track from the display. If the display has no active tracks, remove the display as well.
code
Code Block |
---|
|
for (let participanti = remoteParticipants[e.name];
if (!participant 0; i < participant.displays.length; i++) {
let found = returnfalse;
}
delete remoteParticipants[e.name]; |
SFU_ROOM_EVENT.TRACK_QUALITY_STATE
Find participant. Return if not found.
Code Block |
---|
|
console.log("Received track quality state");
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
} |
Walk through tracks
Code Block |
---|
|
for (const rTrack of e.info.tracks) { |
Find corresponding display
Code Block |
---|
|
const mid = rTrack.mid;
let vDisplay;
for (let i = 0; i < participant.displays.length; i++) {
const display = participant.displays[i];
if (display.mids.video === mid) {
vDisplay = display;
break;
}
} |
Update quality state
Code Block |
---|
|
if (vDisplay) {
for (const qualityInfo of rTrack.quality) {
for (const child of vDisplay.display.childNodes) {
if (child.childNodes.length > 0) {
const display = participant.displays[i];
if (display.mids.audio.includes(rTrack.mid)) {
//remove from mids array
display.mids.audio.splice(display.mids.audio.indexOf(rTrack.mid), 1);
//stop track and remove stream
display.audioStreams[rTrack.mid].getAudioTracks()[0].stop();
delete display.audioStreams[rTrack.mid];
//remove audio element
display.display.removeChild(display.audioElements[rTrack.mid]);
delete display.audioElements[rTrack.mid];
found = true;
} else if (display.mids.video === rTrack.mid) {
display.mids.video = undefined;
display.mediaStream.getVideoTracks()[0].stop();
found = true;
}
if (display.mids.audio.length === 0 && display.mids.video === undefined) {
const video = display.display.getElementsByTagName("video")[0]
video.pause();
video.srcObject = null;
display.display.remove();
participant.displays.splice(i, 1);
}
if (found) {
break;
}
} |
SFU_ROOM_EVENT.LEFT
Find and remove participant.
code
Code Block |
---|
|
let participant = remoteParticipants[e.name];
if (!participant) {
return;
}
participant.displays.forEach(function(display){
display.dispose();
})
delete remoteParticipants[e.name]; |
SFU_ROOM_EVENT.TRACK_QUALITY_STATE
Find participant. Return if not found.
code
Code Block |
---|
|
console.log("Received track quality state");
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
} |
Walk through tracks
code
Code Block |
---|
|
for (const rTrack of e.info.tracks) { |
Find corresponding display and update quality state
code
Code Block |
---|
|
const mid = rTrack.mid;
for (let i = 0; i < participant.displays.length; i++) {
const display = participant.displays[i];
if (display.videoMid === mid) {
display.updateQualityInfo(rTrack.quality);
break;
}
} |
4. Create remote display
createRemoteDisplay() code
Helper function which will create display based on stream and track info.
Code Block |
---|
|
// Build a DOM "cell" for one remote participant and return a controller
// object used to attach/detach that participant's audio and video streams.
// id      - DOM id assigned to the cell element
// name    - participant display name shown in the header
// mainDiv - container element the cell is appended to
const createRemoteDisplay = function(id, name, mainDiv) {
const cell = document.createElement("div");
cell.setAttribute("class", "grid-item");
cell.id = id;
mainDiv.appendChild(cell);
// Header line showing the participant name (later also the video resolution).
const streamNameDisplay = document.createElement("div");
streamNameDisplay.innerHTML = "Name: " + name;
streamNameDisplay.setAttribute("style","width:auto; height:20px");
cell.appendChild(streamNameDisplay);
// Row of quality-selection buttons (populated by setTrackInfo).
const qualityDisplay = document.createElement("div");
qualityDisplay.setAttribute("style","width:auto; height:20px");
cell.appendChild(qualityDisplay);
// Row of temporal-layer (TID) buttons (populated by setTrackInfo).
const tidDisplay = document.createElement("div");
tidDisplay.setAttribute("style","width:auto; height:20px");
cell.appendChild(tidDisplay);
let qualityDivs = [];
let tidDivs = [];
const rootDisplay = document.createElement("div");
rootDisplay.setAttribute("style","width:auto; height:auto");
cell.appendChild(rootDisplay);
// Container that will hold the <video> element once a stream is attached.
const streamDisplay = document.createElement("div");
streamDisplay.setAttribute("style","width:auto; height:auto");
rootDisplay.appendChild(streamDisplay);
// Current media elements; null until set via setAudio()/setVideo().
let audio = null;
let video = null;
return {
// Remove the whole cell from the DOM.
dispose: function() {
cell.remove();
},
// Show (value falsy) or hide (value truthy) the cell without destroying it.
hide: function(value) {
if (value) {
cell.style.display = "none";
} else {
cell.style.display = "block";
}
},
// Attach an audio MediaStream, or detach with null. Replaces any
// previously attached audio element and clears audioMid on detach.
setAudio: function(stream) {
if (audio) {
audio.remove();
}
if (!stream) {
audio = null;
this.audioMid = undefined;
return;
}
audio = document.createElement("audio");
audio.controls = "controls";
cell.appendChild(audio);
audio.srcObject = stream;
audio.play();
},
// True when an audio element or an assigned audio mid is present.
hasAudio: function() {
return audio !== null || this.audioMid !== undefined;
},
// Attach a video MediaStream, or detach with null. Detaching also
// removes the quality/TID buttons created for the previous track.
setVideo: function(stream) {
if (video) {
video.remove();
}
if (stream == null) {
video = null;
this.videoMid = undefined;
qualityDivs.forEach(function(div) {
div.remove();
});
qualityDivs = [];
tidDivs.forEach(function(div) {
div.remove();
});
tidDivs = [];
return;
}
video = document.createElement("video");
streamDisplay.appendChild(video);
video.srcObject = stream;
video.onloadedmetadata = function (e) {
video.play();
};
// Keep the displayed resolution in the header up to date on resize.
video.addEventListener("resize", function (event) {
streamNameDisplay.innerHTML = "Name: " + name + " " + video.videoWidth + "x" + video.videoHeight;
resizeVideo(event.target);
});
},
// Build quality and TID buttons for a video track. Clicking a quality
// asks the room to switch; colors: red = unavailable, gray = available,
// blue = currently selected.
setTrackInfo: function(trackInfo) {
if (trackInfo && trackInfo.quality) {
for (let i = 0; i < trackInfo.quality.length; i++) {
const qualityDiv = document.createElement("button");
qualityDivs.push(qualityDiv);
qualityDiv.innerText = trackInfo.quality[i];
qualityDiv.setAttribute("style", "display:inline-block; border: solid; border-width: 1px");
// Red until the server reports this quality as available.
qualityDiv.style.color = "red";
qualityDiv.addEventListener('click', function(){
console.log("Clicked on quality " + trackInfo.quality[i] + " trackId " + trackInfo.id);
if (qualityDiv.style.color === "red") {
return;
}
// Reset every other available quality back to gray.
for (let c = 0; c < qualityDivs.length; c++) {
if (qualityDivs[c].style.color !== "red") {
qualityDivs[c].style.color = "gray";
}
}
qualityDiv.style.color = "blue";
room.changeQuality(trackInfo.id, trackInfo.quality[i]);
});
qualityDisplay.appendChild(qualityDiv);
}
// Three temporal-layer buttons (TID0..TID2).
for (let i = 0; i < 3; i++) {
const tidDiv = document.createElement("button");
tidDivs.push(tidDiv);
tidDiv.innerText = "TID"+i;
tidDiv.setAttribute("style", "display:inline-block; border: solid; border-width: 1px");
tidDiv.style.color = "gray";
tidDiv.addEventListener('click', function(){
console.log("Clicked on TID " + i + " trackId " + trackInfo.id);
for (let c = 0; c < tidDivs.length; c++) {
tidDivs[c].style.color = "gray";
}
tidDiv.style.color = "blue";
room.changeQuality(trackInfo.id, null, i);
});
tidDisplay.appendChild(tidDiv);
}
}
},
// Apply a server-side quality availability report to the buttons:
// gray = available (selectable), red = unavailable.
updateQualityInfo: function(videoQuality) {
for (const qualityInfo of videoQuality) {
for (const qualityDiv of qualityDivs) {
if (qualityDiv.innerText === qualityInfo.quality){
if (qualityInfo.available === true) {
qualityDiv.style.color = "gray";
} else {
qualityDiv.style.color = "red";
}
break;
}
}
}
},
// True when a video element or an assigned video mid is present.
hasVideo: function() {
return video !== null || this.videoMid !== undefined;
},
// Transceiver mids currently feeding this display; managed externally
// by the ADD_TRACKS / REMOVE_TRACKS room event handlers.
audioMid: undefined,
videoMid: undefined
};
}; |
Remove remote display
dispose() code
Code Block |
---|
|
dispose: function() {
cell.remove();
} |
Hide remote display
hide() code
Code Block |
---|
|
hide: function(value) {
if (value) {
cell.style.display = "none";
} else {
cell.style.display = "block";
}
} |
Create remote display audio element
setAudio() code
Code Block |
---|
|
setAudio: function(stream) {
if (audio) {
audio.remove();
}
if (!stream) {
audio = null;
this.audioMid = undefined;
return;
}
audio = document.createElement("audio");
audio.controls = "controls";
cell.appendChild(audio);
audio.srcObject = stream;
audio.play();
} |
Create remote display video element
setVideo() code
Code Block |
---|
|
setVideo: function(stream) {
if (video) {
video.remove();
}
if (stream == null) {
video = null;
this.videoMid = undefined;
qualityDivs.forEach(function(div) {
div.remove();
});
qualityDivs = [];
tidDivs.forEach(function(div) {
div.remove();
});
tidDivs = [];
return;
}
video = document.createElement("video");
streamDisplay.appendChild(video);
video.srcObject = stream;
video.onloadedmetadata = function (e) {
video.play();
};
video.addEventListener("resize", function (event) {
streamNameDisplay.innerHTML = "Name: " + name + " " + video.videoWidth + "x" + video.videoHeight;
resizeVideo(event.target);
});
} |
Display track quality info
setTrackInfo() code
Code Block |
---|
|
setTrackInfo: function(trackInfo) {
if (trackInfo && trackInfo.quality) {
for (let i = 0; i < trackInfo.quality.length; i++) {
const qualityDiv = document.createElement("button");
qualityDivs.push(qualityDiv);
qualityDiv.innerText = trackInfo.quality[i];
qualityDiv.setAttribute("style", "display:inline-block; border: solid; border-width: 1px");
qualityDiv.style.color = "red";
qualityDiv.addEventListener('click', function(){
console.log("Clicked on quality " + trackInfo.quality[i] + " trackId " + trackInfo.id);
forif (const cChild of child.childNodesqualityDiv.style.color === "red") {
if (cChild.innerHTML === qualityInfo.quality) {
return;
if (qualityInfo.available === true) {
}
cChild.style.color = "gray";
for (let c = 0; c < qualityDivs.length; c++) {
} else {
cChildif (qualityDivs[c].style.color !== "red";") {
}
qualityDivs[c].style.color = "gray";
break;
}
}
}
}
}
} |
Helper function which will create display based on stream and track info.
Code Block |
---|
|
const createRemoteDisplay = function(id, name, stream, trackInfo) {
const cell = document.createElement('div');
cell.setAttribute("class", "grid-item")qualityDiv.style.color = "blue";
cell.id = id;
mainDiv.appendChild(cell);
const streamNameDisplay = document.createElement("div");
streamNameDisplay.innerHTML = "Name: " + name;
streamNameDisplayroom.setAttribute("style","width:auto; height:20px"changeQuality(trackInfo.id, trackInfo.quality[i]);
cell.appendChild(streamNameDisplay);
const qualityDisplay = document.createElement("div");
qualityDisplay.setAttribute("style","width:auto; height:20px" });
cell.appendChild(qualityDisplay);
const tidDisplay = document.createElement("div");
tidDisplay.setAttribute("style","width:auto; height:20px");
cellqualityDisplay.appendChild(tidDisplayqualityDiv);
const qualityDivs = [];
const tidDivs = [];
}
if (trackInfo && trackInfo.quality) {
for (let i = 0; i < trackInfo.quality.length3; i++) {
const qualityDivtidDiv = document.createElement("button");
qualityDivs tidDivs.push(qualityDivtidDiv);
qualityDivtidDiv.innerText = trackInfo.quality[i];
= "TID"+i;
qualityDivtidDiv.setAttribute("style", "display:inline-block; border: solid; border-width: 1px");
qualityDiv tidDiv.style.color = "redgray";
qualityDivtidDiv.addEventListener('click', function(){
console.log("Clicked on qualityTID " + trackInfo.quality[i] + " trackId " + trackInfo.id);
if (qualityDiv.style.color === "red") {
return;
}
for (let c = 0; c < qualityDivstidDivs.length; c++) {
if (qualityDivs[c].style.color !== "red") {
qualityDivstidDivs[c].style.color = "gray";
}
}
}
qualityDivtidDiv.style.color = "blue";
room.changeQuality(trackInfo.id, null, trackInfo.quality[i]);
});
qualityDisplay.appendChild(qualityDiv});
}
for (let i = 0; i < 3; i++) {
tidDisplay.appendChild(tidDiv);
const tidDiv = document.createElement("button");
}
tidDivs.push(tidDiv);
}
tidDiv.innerText = "TID"+i;
} |
Update quality state
updateQualityInfo() code
Code Block |
---|
|
tidDiv.setAttribute("style", "display:inline-block; border: solid; border-widthupdateQualityInfo: 1px");
function(videoQuality) {
tidDiv.style.color = "gray";
for (const qualityInfo of videoQuality) {
tidDiv.addEventListener('click', function(){
for (const qualityDiv of console.log("Clicked on TID " + i + " trackId " + trackInfo.id);
qualityDivs) {
if (qualityDiv.innerText for (let c = 0; c < tidDivs.length; c++) {
=== qualityInfo.quality){
tidDivs[c].style.color = "gray";if (qualityInfo.available === true) {
}
tidDivqualityDiv.style.color = "bluegray";
room.changeQuality(trackInfo.id, null, i);
} else });{
tidDisplay.appendChild(tidDiv);
}
}
const rootDisplay = document.createElement('div');
rootDisplayqualityDiv.setAttribute("style","width:auto; height:auto");
style.color = "red";
cell.appendChild(rootDisplay);
const streamDisplay = document.createElement('div');
streamDisplay.setAttribute("style","width:auto; height:auto");
rootDisplay.appendChild(streamDisplay);
const video = document.createElement("video");
}
streamDisplay.appendChild(video);
video.srcObject = stream;
video.onloadedmetadata = function (e) {
video.play() break;
};
video.addEventListener("ended", function() {
console.log("VIDEO ENDED");
});
video.addEventListener('resize', function (event) {
streamNameDisplay.innerHTML = "Name: "}
+ name + " " + video.videoWidth + "x" + video.videoHeight;
}
resizeVideo(event.target);
});
return cell;
} |
...
5. Work with peer connection
code
Subscribe to PeerConnection's "ontrack" event.
Code Block |
---|
|
peerConnection.ontrack = ({transceiver}) => {
let rParticipant;
console.log("Attach remote track " + transceiver.receiver.track.id + " kind " + transceiver.receiver.track.kind + " mid " + transceiver.mid);
for (const [nickName, participant] of Object.entries(remoteParticipants)) {
for (const pTrack of participant.tracks) {
console.log("Participant " + participant.nickName + " track " + pTrack.id + " mid " + pTrack.mid);
if (pTrack.mid === transceiver.mid) {
rParticipant = participant;
break;
}
}
if (rParticipant) {
break;
}
}
if (rParticipant) {
for (const display of rParticipant.displays) {
if (transceiver.receiver.track.kind === "video") {
if (display.mids.videovideoMid === transceiver.mid) {
let stream = new MediaStream();
display.mediaStreamstream.addTrack(transceiver.receiver.track);
display.display.getElementsByTagName("video")[0].play(setVideo(stream);
break;
}
} else if (transceiver.receiver.track.kind === "audio") {
if (display.mids.audio.includes(audioMid === transceiver.mid)) {
break;
let stream = new MediaStream();
}
display.mids.audio.pushstream.addTrack(transceiver.receiver.midtrack);
let aStream = new MediaStreamdisplay.setAudio(stream);
aStream.addTrack(transceiver.receiver.track);
break;
display.audioStreams[transceiver.mid] = aStream;
}
let audio = document.createElement("audio");
}
audio.controls = "controls";
}
} else {
display.audioElements[transceiver.mid] = audio;
console.warn("Failed to find participant for track " + displaytransceiver.receiver.displaytrack.appendChild(audioid);
}
audio.srcObject = aStream;
audio.play();
break;} |
Find participant based on track's mid
code
Code Block |
---|
|
let rParticipant;
console.log("Attach remote track " + transceiver.receiver.track.id + " kind " + transceiver.receiver.track.kind + " mid " + transceiver.mid);
for (const [nickName, participant] of Object.entries(remoteParticipants)) {
for (const pTrack of participant.tracks) {
console.log("Participant " + participant.nickName }
+ " track " + pTrack.id + " }
mid " } else {
+ pTrack.mid);
if console.warn("Failed to find participant for track " + transceiver.receiver.track.id);
}
} |
Find participant based on track's mid
Code Block |
---|
|
let rParticipant;
console.log("Attach remote track " + transceiver.receiver.track.id + " kind " + transceiver.receiver.track.kind + " mid " + transceiver.mid);
for (const [nickName, participant] of Object.entries(remoteParticipants)) {
for (const pTrack of participant.tracks) {
(pTrack.mid === transceiver.mid) {
rParticipant = participant;
break;
}
}
if (rParticipant) {
console.log("Participant " + participant.nickName + " track " + pTrack.id + " mid " + pTrack.mid);
break;
}
} |
Find corresponding display among participant's displays and add track.
code
Code Block |
---|
|
for (const display of rParticipant.displays) {
if (pTrack.midtransceiver.receiver.track.kind === transceiver.mid"video") {
rParticipant = participant; if (display.videoMid === transceiver.mid) {
break;
}
}
let stream = new if MediaStream(rParticipant);
{
break;
}
} |
Find corresponding display among participant's displays and add track.
Code Block |
---|
|
for (const display of rParticipant.displays) {
if stream.addTrack(transceiver.receiver.track.kind === "video") {);
if (display.mids.video === transceiver.mid) {
display.mediaStream.addTrack(transceiver.receiver.tracksetVideo(stream);
display.display.getElementsByTagName("video")[0].play();
break;
}
} else if (transceiver.receiver.track.kind === "audio") {
if (display.mids.audio.includes(audioMid === transceiver.mid)) {
break;
}
let stream = display.mids.audio.push(transceiver.midnew MediaStream();
let aStream = new MediaStream();
aStreamstream.addTrack(transceiver.receiver.track);
display.audioStreams[transceiver.mid] = aStream;
let audio = documentdisplay.createElementsetAudio("audio"stream);
audio.controls = "controls";
display.audioElements[transceiver.mid] = audiobreak;
display.display.appendChild(audio);
audio.srcObject = aStream;
}
audio.play(); }
break;
}
} |