Contains code related to displaying local and remote media. The code is separated into two closures: one for local media and another for remote media.
Local media
Define top level closure for local media code
Code Block |
---|
|
const initLocalDisplay = function(localDisplayElement){ |
Define local variables
Code Block |
---|
|
const localDisplayDiv = localDisplayElement;
const localDisplays = {}; |
Define function that will remove local display once track is ended
Code Block |
---|
|
// Unregister the local display stored under the given id, detach its
// DOM node, and rebuild the local layout grid.
const removeLocalDisplay = function(id) {
    const selector = '#' + id;
    delete localDisplays[id];
    $(selector).remove();
    reassembleLocalLayout();
}
Define a function that will find a video element without an attached audio track.
Code Block |
---|
|
// Scan the registered local displays for one whose video element's
// stream has no audio track attached yet.
// Returns { id, video, audioStateDisplay } for the first match, or
// undefined when every display already carries audio (or none has video).
const getAudioContainer = function() {
    for (const display of Object.values(localDisplays)) {
        const videos = display.getElementsByTagName("video");
        if (!videos || !videos[0]) {
            continue;
        }
        const buttons = display.getElementsByTagName("button");
        const audioTracks = videos[0].srcObject.getAudioTracks();
        if (!audioTracks || audioTracks.length === 0) {
            return {
                id: display.id,
                video: videos[0],
                audioStateDisplay: buttons[0]
            };
        }
    }
};
add function
Define the add function, which adds a new track to the local display.
Code Block |
---|
|
// Add a local media stream to the display grid.
// - id: DOM id for the stream's inner container (also the key in localDisplays)
// - name: human-readable label shown above the video
// - stream: MediaStream whose tracks should be rendered
// If the stream carries audio and an existing display has a video element
// without an audio track, the audio track is merged into that display and
// no new container is created (returns undefined). Otherwise a new display
// (name label, mute/unmute button, muted autoplaying <video>) is built,
// registered in localDisplays, the layout is reassembled, and the new
// container element is returned.
const add = function(id, name, stream) {
// Audio-bearing stream: try to piggyback its audio track onto an existing
// video-only display instead of creating a new container.
if (stream.getAudioTracks().length > 0) {
let videoElement = getAudioContainer();
if (videoElement) {
let track = stream.getAudioTracks()[0];
videoElement.video.srcObject.addTrack(track);
videoElement.audioStateDisplay.innerHTML = "Audio state: " + stream.getAudioTracks()[0].enabled;
track.addEventListener("ended", function() {
videoElement.video.srcObject.removeTrack(track);
videoElement.audioStateDisplay.innerHTML = "Audio state: " + false;
// Remove the whole display only when every remaining track has ended.
for (const [key, vTrack] of Object.entries(videoElement.video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(videoElement.id);
});
return;
}
}
// Build a new display container.
// NOTE(review): coreDisplay gets stream.id while the inner streamDisplay
// gets the passed-in id; removeLocalDisplay removes by the passed-in id.
// Confirm the two ids are never expected to collide or match.
const coreDisplay = document.createElement('div');
coreDisplay.setAttribute("style","width:200px; height:auto; border: solid; border-width: 1px");
coreDisplay.id = stream.id;
// Name label.
const streamNameDisplay = document.createElement("div");
streamNameDisplay.innerHTML = "Name: " + name;
streamNameDisplay.setAttribute("style","width:auto; height:30px");
coreDisplay.appendChild(streamNameDisplay);
// Button reflecting the first audio track's enabled state; clicking it
// toggles mute/unmute on that track.
const audioStateDisplay = document.createElement("button");
audioStateDisplay.setAttribute("style","width:auto; height:30px");
audioStateDisplay.innerHTML = "Audio state: " + (stream.getAudioTracks().length > 0 ? stream.getAudioTracks()[0].enabled : false);
audioStateDisplay.addEventListener('click', function(){
if (stream.getAudioTracks().length > 0) {
stream.getAudioTracks()[0].enabled = !(stream.getAudioTracks()[0].enabled);
audioStateDisplay.innerHTML = "Audio state: " + stream.getAudioTracks()[0].enabled;
}
});
coreDisplay.appendChild(audioStateDisplay);
// Inner container holding the <video>; keyed by the passed-in id so the
// display can be located (and removed) later.
const streamDisplay = document.createElement('div');
streamDisplay.id = id;
streamDisplay.setAttribute("style","width:auto; height:auto");
coreDisplay.appendChild(streamDisplay);
// Video element: muted to avoid local playback feedback, started as soon
// as metadata is available.
const video = document.createElement("video");
streamDisplay.appendChild(video);
video.srcObject = stream;
video.muted = true;
video.onloadedmetadata = function (e) {
video.play();
};
// When a track ends, detach it from the element's stream; drop the whole
// display once no live tracks remain.
stream.getTracks().forEach(function(track){
track.addEventListener("ended", function() {
video.srcObject.removeTrack(track);
// Check whether the video element has any non-ended tracks left.
for (const [key, vTrack] of Object.entries(video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(id);
});
});
// Keep the name label in sync with the actual video resolution and
// rescale the element to fit its container.
video.addEventListener('resize', function (event) {
streamNameDisplay.innerHTML = "Name: " + name + " " + video.videoWidth + "x" + video.videoHeight;
resizeVideo(event.target);
});
// Register the display and rebuild the grid layout.
localDisplays[id] = coreDisplay;
reassembleLocalLayout();
return coreDisplay;
} |
Check whether the new stream contains an audio track. If so, try to find a video element without an attached audio track. If such a video element is found, attach the new audio track to it and return.
Code Block |
---|
|
if (stream.getAudioTracks().length > 0) {
let videoElement = getAudioContainer();
if (videoElement) {
let track = stream.getAudioTracks()[0];
videoElement.video.srcObject.addTrack(track);
videoElement.audioStateDisplay.innerHTML = "Audio state: " + stream.getAudioTracks()[0].enabled;
track.addEventListener("ended", function() {
videoElement.video.srcObject.removeTrack(track);
videoElement.audioStateDisplay.innerHTML = "Audio state: " + false;
//check video element has no tracks left
for (const [key, vTrack] of Object.entries(videoElement.video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(videoElement.id);
});
return;
}
} |
Create new display container
Code Block |
---|
|
const coreDisplay = document.createElement('div');
coreDisplay.setAttribute("style","width:200px; height:auto; border: solid; border-width: 1px");
coreDisplay.id = stream.id; |
Create and add name container to display container
Code Block |
---|
|
const streamNameDisplay = document.createElement("div");
streamNameDisplay.innerHTML = "Name: " + name;
streamNameDisplay.setAttribute("style","width:auto; height:30px");
coreDisplay.appendChild(streamNameDisplay); |
Create and add audio state display. Subscribe to "click" event to handle mute/unmute.
Code Block |
---|
|
const audioStateDisplay = document.createElement("button");
audioStateDisplay.setAttribute("style","width:auto; height:30px");
audioStateDisplay.innerHTML = "Audio state: " + (stream.getAudioTracks().length > 0 ? stream.getAudioTracks()[0].enabled : false);
audioStateDisplay.addEventListener('click', function(){
if (stream.getAudioTracks().length > 0) {
stream.getAudioTracks()[0].enabled = !(stream.getAudioTracks()[0].enabled);
audioStateDisplay.innerHTML = "Audio state: " + stream.getAudioTracks()[0].enabled;
}
});
coreDisplay.appendChild(audioStateDisplay); |
Create stream display which will hold video element.
Code Block |
---|
|
const streamDisplay = document.createElement('div');
streamDisplay.id = id;
streamDisplay.setAttribute("style","width:auto; height:auto");
coreDisplay.appendChild(streamDisplay); |
Create video element and add it to stream display.
Code Block |
---|
|
const video = document.createElement("video");
streamDisplay.appendChild(video);
video.srcObject = stream;
video.muted = true;
video.onloadedmetadata = function (e) {
video.play();
}; |
Subscribe to each track's "ended" event. Once a track has ended, check whether the video element still has any active tracks. If not, remove the display.
Code Block |
---|
|
stream.getTracks().forEach(function(track){
track.addEventListener("ended", function() {
video.srcObject.removeTrack(track);
//check video element has no tracks left
for (const [key, vTrack] of Object.entries(video.srcObject.getTracks())) {
if (vTrack.readyState !== "ended") {
return;
}
}
removeLocalDisplay(id);
});
}); |
Subscribe to resize event to keep video inside the container.
Code Block |
---|
|
video.addEventListener('resize', function (event) {
streamNameDisplay.innerHTML = "Name: " + name + " " + video.videoWidth + "x" + video.videoHeight;
resizeVideo(event.target);
}); |
Save display, reassemble local displays and return newly created one.
Code Block |
---|
|
localDisplays[id] = coreDisplay;
reassembleLocalLayout();
return coreDisplay; |
Define helper which will recalculate local displays grid and reassemble local displays.
Code Block |
---|
|
// Rebuild the local display grid from scratch.
// The grid width (columns per row) is derived from the current number of
// registered displays via gridSize(). Displays are laid out row by row;
// the previous layout is discarded and the freshly built container is
// attached to localDisplayDiv.
// Fixes: removed the unused row counter (rowI) and merged the duplicated
// "start a new row" branches into a single guard.
const reassembleLocalLayout = function() {
    const gridWidth = gridSize(Object.keys(localDisplays).length).x;
    const container = document.createElement('div');
    let row;
    let colI = 0;
    for (const [key, value] of Object.entries(localDisplays)) {
        // Start a new row on the first display and whenever the current
        // row is full.
        if (!row || colI >= gridWidth) {
            row = createRow(container);
            colI = 0;
        }
        // Detach the display from its old position before re-parenting it.
        $("#" + key).detach();
        const col = createCol(row);
        col.appendChild(value);
        colI++;
    }
    $(localDisplayDiv).empty();
    localDisplayDiv.appendChild(container);
}
Export add function for main code.
Code Block |
---|
|
return {
add: add
} |
Remote media
Define top level closure for remote media code
Code Block |
---|
|
const initRemoteDisplay = function(room, mainDiv, peerConnection) { |
Define local variables
Code Block |
---|
|
const constants = SFU.constants;
const remoteParticipants = {}; |
Subscribe to related room events
Code Block |
---|
|
// Wire up room event handlers for the remote-media lifecycle:
//   ADD_TRACKS          - record new remote tracks; create one display per video track
//   REMOVE_TRACKS       - detach ended tracks and drop displays with no tracks left
//   LEFT                - forget a departed participant
//   TRACK_QUALITY_STATE - recolor per-quality labels by availability
room.on(constants.SFU_ROOM_EVENT.ADD_TRACKS, function(e) {
// Look up the participant by nickname, creating a record on first sight.
let participant = remoteParticipants[e.info.nickName];
if (!participant) {
participant = {};
participant.nickName = e.info.nickName;
participant.tracks = [];
participant.displays = [];
remoteParticipants[participant.nickName] = participant;
}
// Append all announced tracks to the participant's bookkeeping list.
participant.tracks.push.apply(participant.tracks, e.info.info);
// Create a display object for each VIDEO track; audio tracks are attached
// to displays elsewhere and only tracked here via mids.audio.
for (const pTrack of e.info.info) {
if (pTrack.type === "VIDEO") {
const displayObj = {
display: createRemoteDisplay(participant.nickName, participant.nickName, null, pTrack),
mediaStream: new MediaStream(),
mids: {
audio: [],
video: undefined
},
audioStreams: {},
audioElements: {}
};
const video = displayObj.display.getElementsByTagName("video")[0];
video.srcObject = displayObj.mediaStream;
displayObj.mids.video = pTrack.mid;
participant.displays.push(displayObj);
}
}
}).on(constants.SFU_ROOM_EVENT.REMOVE_TRACKS, function(e) {
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
}
// For each removed track: drop it from the bookkeeping list, then find
// the display that owns its mid and release the associated resources.
for (const rTrack of e.info.info) {
for (let i = 0; i < participant.tracks.length; i++) {
if (rTrack.mid === participant.tracks[i].mid) {
participant.tracks.splice(i, 1);
break;
}
}
for (let i = 0; i < participant.displays.length; i++) {
let found = false;
const display = participant.displays[i];
if (display.mids.audio.includes(rTrack.mid)) {
// Audio mid: remove it from the mids array...
display.mids.audio.splice(display.mids.audio.indexOf(rTrack.mid), 1);
// ...stop the track and drop its stream...
display.audioStreams[rTrack.mid].getAudioTracks()[0].stop();
delete display.audioStreams[rTrack.mid];
// ...and remove the corresponding <audio> element from the display.
display.display.removeChild(display.audioElements[rTrack.mid]);
delete display.audioElements[rTrack.mid];
found = true;
} else if (display.mids.video === rTrack.mid) {
// Video mid: clear it and stop the video track.
// NOTE(review): getVideoTracks()[0] is unguarded — confirm the
// stream always still holds a video track at this point.
display.mids.video = undefined;
display.mediaStream.getVideoTracks()[0].stop();
found = true;
}
// Once a display has neither audio mids nor a video mid, tear it down.
if (display.mids.audio.length === 0 && display.mids.video === undefined) {
const video = display.display.getElementsByTagName("video")[0]
video.pause();
video.srcObject = null;
display.display.remove();
participant.displays.splice(i, 1);
}
if (found) {
break;
}
}
}
}).on(constants.SFU_ROOM_EVENT.LEFT, function(e) {
// Participant left the room: drop the bookkeeping entry.
// NOTE(review): this only deletes the record — the participant's display
// elements are not removed here; confirm they are cleaned up via
// REMOVE_TRACKS or elsewhere.
let participant = remoteParticipants[e.name];
if (!participant) {
return;
}
delete remoteParticipants[e.name];
}).on(constants.SFU_ROOM_EVENT.TRACK_QUALITY_STATE, function(e){
console.log("Received track quality state");
const participant = remoteParticipants[e.info.nickName];
if (!participant) {
return;
}
// For each reported track, locate the display showing that video mid.
for (const rTrack of e.info.tracks) {
const mid = rTrack.mid;
let vDisplay;
for (let i = 0; i < participant.displays.length; i++) {
const display = participant.displays[i];
if (display.mids.video === mid) {
vDisplay = display;
break;
}
}
//todo rework loops
// Walk the display's child nodes looking for the label whose text
// matches the quality name, and color it gray (available) or red (not).
if (vDisplay) {
for (const qualityInfo of rTrack.quality) {
for (const child of vDisplay.display.childNodes) {
if (child.childNodes.length > 0) {
for (const cChild of child.childNodes) {
if (cChild.innerHTML === qualityInfo.quality) {
if (qualityInfo.available === true) {
cChild.style.color = "gray";
} else {
cChild.style.color = "red";
}
break;
}
}
}
}
}
}
}
}); |
constants.SFU_ROOM_EVENT.ADD_TRACKS
Find the participant. If none is found, create a new one.
Code Block |
---|
|
let participant = remoteParticipants[e.info.nickName];
if (!participant) {
participant = {};
participant.nickName = e.info.nickName;
participant.tracks = [];
participant.displays = [];
remoteParticipants[participant.nickName] = participant;
} |
Add new tracks to the participant.
Code Block |
---|
|
participant.tracks.push.apply(participant.tracks, e.info.info); |
Code Block |
---|
|
for (const pTrack of e.info.info) {
if (pTrack.type === "VIDEO") {
const displayObj = {
display: createRemoteDisplay(participant.nickName, participant.nickName, null, pTrack),
mediaStream: new MediaStream(),
mids: {
audio: [],
video: undefined
},
audioStreams: {},
audioElements: {}
};
const video = displayObj.display.getElementsByTagName("video")[0];
video.srcObject = displayObj.mediaStream;
displayObj.mids.video = pTrack.mid;
participant.displays.push(displayObj);
}
} |