Canvas Streaming¶
Overview¶
The example shows how to capture video from a web camera, draw it onto an HTML5 Canvas with optional mirroring, and publish the resulting canvas stream.
In the screenshot below, the stream is published from a 320x240 canvas; the frames are drawn using the requestAnimationFrame method, and the picture is mirrored.

The code of the example¶
The example code is available on the WCS server at the following path:
/usr/local/FlashphonerWebCallServer/client2/examples/demo/streaming/canvas_streaming
canvas_streaming.css - styles file
canvas_streaming.html - client page
canvas_streaming.js - the main script of the example
The example can be tested at the following URL:
https://host:8888/client2/examples/demo/streaming/canvas_streaming/canvas_streaming.html
where host is the WCS server address.
Analyzing the code¶
To analyze the code, take the canvas_streaming.js file version with hash 485b3fb, which is available here and can be downloaded with SDK build 2.0.259.
1. API initialization¶
Flashphoner.init() code
const init_page = function() {
//init api
try {
Flashphoner.init();
} catch (e) {
setText("notifyFlash", "Your browser doesn't support WebRTC technology needed for this example");
return;
}
...
}
2. Container creation for mock video element¶
Video and audio tracks are captured from an HTML5 video element that is outside of the HTML page DOM and is therefore invisible. Consequently, the container holding this video element should also be kept out of the DOM, as shown in the sketch below.
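A minimal sketch of such a detached container (illustrative only, not taken from the example sources; the element name is assumed):
// the container is created but intentionally never appended to the page,
// so both it and the mock video element placed inside stay out of the DOM
const mockVideoContainer = document.createElement("div");
// note: there is no document.body.appendChild(mockVideoContainer) call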
3. Connecting to the server¶
Flashphoner.createSession() code
const connect = function() {
let url = getValue('urlServer');
//create session
console.log("Create new session with url " + url);
Flashphoner.createSession({urlServer: url}).on(SESSION_STATUS.ESTABLISHED, function(session){
...
}).on(SESSION_STATUS.DISCONNECTED, function(){
...
}).on(SESSION_STATUS.FAILED, function(){
...
});
}
4. Receiving the event confirming successful connection¶
SESSION_STATUS.ESTABLISHED code
const connect = function() {
let url = getValue('urlServer');
//create session
console.log("Create new session with url " + url);
Flashphoner.createSession({urlServer: url}).on(SESSION_STATUS.ESTABLISHED, function (session) {
currentSession = session;
setStatus("connectStatus", session.status());
startStreaming();
}).on(SESSION_STATUS.DISCONNECTED, function () {
...
}).on(SESSION_STATUS.FAILED, function () {
...
});
}
5. Canvas stream publishing¶
Session.createStream(), Stream.publish() code
The following parameters are passed to createStream() method:
streamName - stream name
localVideo - container for HTML5 video element
constraints.audio: false - audio is captured from the custom canvas stream
constraints.video: false - video is captured from the custom canvas stream
constraints.customStream - custom canvas stream
const startStreaming = function() {
let session = currentSession;
let streamName = getValue("urlServer").split('/')[3];
let canvasStream = createCanvasStream();
session.createStream({
name: streamName,
display: localVideo,
constraints: {
audio: false,
video: false,
customStream: canvasStream
}
}).on(STREAM_STATUS.PUBLISHING, function (stream) {
...
}).on(STREAM_STATUS.UNPUBLISHED, function () {
...
}).on(STREAM_STATUS.FAILED, function () {
...
}).publish();
}
6. Receiving the event confirming successful stream publishing¶
STREAM_STATUS.PUBLISHING code
Playback of the published stream is started on this event
const startStreaming = function() {
...
session.createStream({
...
}).on(STREAM_STATUS.PUBLISHING, function (stream) {
setStatus("publishStatus", STREAM_STATUS.PUBLISHING);
playStream();
onPublishing(stream);
}).on(STREAM_STATUS.UNPUBLISHED, function () {
...
}).on(STREAM_STATUS.FAILED, function () {
...
}).publish();
}
7. Playing the stream¶
Session.createStream(), Stream.play() code
The following parameters are passed to createStream() method:
streamName - stream name
remoteVideo - container for HTML5 video element
constraints - constraints to play audio and video tracks
The display size is also set according to the canvas size
const playStream = function() {
let session = currentSession;
let streamName = getValue("urlServer").split('/')[3];
let width = getValue("width");
let height = getValue("height");
setDisplaySize(remoteVideo.parentNode, width, height);
session.createStream({
name: streamName,
display: remoteVideo,
constraints: {
audio: !Browser.isiOS(),
video: true
}
}).on(STREAM_STATUS.PENDING, function (stream) {
...
}).on(STREAM_STATUS.PLAYING, function (stream) {
...
}).on(STREAM_STATUS.STOPPED, function () {
...
}).on(STREAM_STATUS.FAILED, function (stream) {
...
}).play();
}
8. Receiving the event confirming successful stream playback¶
STREAM_STATUS.PLAYING code
const playStream = function() {
...
session.createStream({
...
}).on(STREAM_STATUS.PENDING, function (stream) {
...
}).on(STREAM_STATUS.PLAYING, function (stream) {
setStatus("playStatus", stream.status());
onPlaying(stream);
}).on(STREAM_STATUS.STOPPED, function () {
...
}).on(STREAM_STATUS.FAILED, function (stream) {
...
}).play();
}
9. Stopping the stream playback¶
Stream.stop() code
const stopBtnClick = function() {
...
if (previewStream != null) {
previewStream.stop();
previewStream = null;
}
}
10. Receiving the event confirming successful playback stopping¶
STREAM_STATUS.STOPPED code
const playStream = function() {
...
session.createStream({
...
}).on(STREAM_STATUS.PENDING, function (stream) {
...
}).on(STREAM_STATUS.PLAYING, function (stream) {
...
}).on(STREAM_STATUS.STOPPED, function () {
setStatus("playStatus", STREAM_STATUS.STOPPED);
onStopped();
}).on(STREAM_STATUS.FAILED, function (stream) {
...
}).play();
}
11. Stopping the stream publishing after playback¶
Stream.stop() code
const onStopped = function() {
...
if (publishStream != null && publishStream.published()) {
publishStream.stop();
}
}
12. Receiving the event confirming successful published stream stopping¶
STREAM_STATUS.UNPUBLISHED code
const startStreaming = function() {
...
session.createStream({
...
}).on(STREAM_STATUS.PUBLISHING, function (stream) {
...
}).on(STREAM_STATUS.UNPUBLISHED, function () {
setStatus("publishStatus", STREAM_STATUS.UNPUBLISHED);
onUnpublished();
disconnect();
}).on(STREAM_STATUS.FAILED, function () {
...
}).publish();
}
13. Stopping canvas streaming and destroying canvas¶
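The corresponding snippet is not shown above. A minimal sketch, assuming stopCanvasStream() (see section 17) is invoked from the onUnpublished() handler; the actual call site in canvas_streaming.js may differ:
const onUnpublished = function() {
    ...
    // stop the mock video element and destroy the canvas (see section 17)
    stopCanvasStream();
}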
14. Session disconnection¶
Session.disconnect() code
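The snippet itself is omitted here. A minimal sketch, assuming the session object saved to currentSession at step 4 is used:
const disconnect = function() {
    if (currentSession) {
        // close the connection to WCS; SESSION_STATUS.DISCONNECTED is received next (see step 15)
        currentSession.disconnect();
    }
}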
15. Receiving the event confirming successful session disconnection¶
SESSION_STATUS.DISCONNECTED code
const connect = function() {
let url = getValue('urlServer');
//create session
console.log("Create new session with url " + url);
Flashphoner.createSession({urlServer: url}).on(SESSION_STATUS.ESTABLISHED, function (session) {
...
}).on(SESSION_STATUS.DISCONNECTED, function () {
setStatus("connectStatus", SESSION_STATUS.DISCONNECTED);
onDisconnected();
}).on(SESSION_STATUS.FAILED, function () {
...
});
}
16. Canvas and video elements initialization¶
const createCanvasStream = function() {
let type = getCheckbox("webGl") ? CANVAS_TYPE.CANVAS_WEBGL : CANVAS_TYPE.CANVAS_2D;
let width = getValue("width");
let height = getValue("height");
let constraints = {};
canvas = Canvas("canvasContainer", width, height, type,
getCheckbox("mirror"), getCheckbox("useAnimFrame"));
mockVideo = Video(canvas);
if (!getCheckbox("sendVideo")) {
constraints.video = false;
} else {
constraints.video = {
width: width,
height: height
};
}
constraints.audio = getCheckbox("sendAudio");
mockVideo.start(constraints);
return canvas.canvasStream();
}
17. Canvas and video elements stopping and destroying¶
const stopCanvasStream = function() {
if (mockVideo) {
mockVideo.stop();
}
if (canvas) {
canvas.close();
}
}
18. Canvas operations¶
const Canvas = function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
const canvasObject = {
canvas: null,
useRequestAnimationFrame: false,
context: null,
stream: null,
init: function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
...
},
close: function() {
...
},
drawFrame: function(source) {
...
},
loop: function(video) {
...
},
canvasStream: function() {
return canvasObject.stream;
}
};
canvasObject.init(parentId, width, height, type, mirror, useRequestAnimationFrame);
return canvasObject;
}
18.1 Canvas creation¶
const Canvas = function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
const canvasObject = {
...
init: function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
let parent = document.getElementById(parentId);
if (parent) {
canvasObject.canvas = document.createElement("canvas");
canvasObject.canvas.width = width;
canvasObject.canvas.height = height;
parent.appendChild(canvasObject.canvas);
setDisplaySize(parent, width, height);
canvasObject.mirror = mirror;
canvasObject.useRequestAnimationFrame = useRequestAnimationFrame;
if (type === CANVAS_TYPE.CANVAS_2D) {
canvasObject.context = Canvas2d(canvasObject.canvas, mirror);
} else if (type === CANVAS_TYPE.CANVAS_WEBGL) {
canvasObject.context = CanvasWebGl(canvasObject.canvas, mirror);
}
canvasObject.stream = canvasObject.canvas.captureStream(30); // capture a 30 fps MediaStream from the canvas
}
},
close: function() {
...
},
drawFrame: function(source) {
...
},
loop: function(video) {
...
},
canvasStream: function() {
...
}
};
canvasObject.init(parentId, width, height, type, mirror, useRequestAnimationFrame);
return canvasObject;
}
18.2 Canvas destruction¶
const Canvas = function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
const canvasObject = {
...
init: function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
...
},
close: function() {
if (canvasObject.canvas) {
canvasObject.canvas.parentNode.style.display = "none";
canvasObject.canvas.remove();
canvasObject.canvas = null;
canvasObject.stream = null;
}
canvasObject.useRequestAnimationFrame = false;
canvasObject.context = null;
},
drawFrame: function(source) {
...
},
loop: function(video) {
...
},
canvasStream: function() {
...
}
};
canvasObject.init(parentId, width, height, type, mirror, useRequestAnimationFrame);
return canvasObject;
}
18.3 Drawing frame on canvas¶
const Canvas = function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
const canvasObject = {
...
init: function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
...
},
close: function() {
...
},
drawFrame: function(source) {
if (source && canvasObject.context) {
canvasObject.context.drawFrame(source);
}
},
loop: function(video) {
...
},
canvasStream: function() {
...
}
};
canvasObject.init(parentId, width, height, type, mirror, useRequestAnimationFrame);
return canvasObject;
}
18.4 Loop method to redraw on the canvas¶
const Canvas = function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
const canvasObject = {
...
init: function(parentId, width, height, type, mirror, useRequestAnimationFrame) {
...
},
close: function() {
...
},
drawFrame: function(source) {
...
},
loop: function(video) {
if (!video.paused && !video.ended) {
canvasObject.drawFrame(video);
if (canvasObject.useRequestAnimationFrame) {
requestAnimationFrame(() => {
canvasObject.loop(video);
});
} else {
setTimeout(() => {
canvasObject.loop(video);
}, 1000 / 30); // drawing at 30fps
}
}
},
canvasStream: function() {
...
}
};
canvasObject.init(parentId, width, height, type, mirror, useRequestAnimationFrame);
return canvasObject;
}
18.5 Mirroring 2d canvas¶
const Canvas2d = function(canvas, mirror) {
const canvas2d = {
canvas: null,
api: null,
init: function(canvas, mirror) {
if (canvas) {
canvas2d.canvas = canvas;
let context = canvas2d.canvas.getContext(CANVAS_TYPE.CANVAS_2D);
if (mirror) {
context.translate(canvas2d.canvas.width, 0);
context.scale(-1, 1);
context.save();
}
canvas2d.api = {
context: context
}
}
},
close: function() {
...
},
drawFrame: function(source) {
...
}
};
canvas2d.init(canvas, mirror);
return canvas2d;
}
18.6 Drawing a frame on 2d canvas¶
const Canvas2d = function(canvas, mirror) {
const canvas2d = {
canvas: null,
api: null,
init: function(canvas, mirror) {
...
},
close: function() {
...
},
drawFrame: function(source) {
if (source && canvas2d.api && canvas2d.api.context) {
canvas2d.api.context.drawImage(source, 0, 0);
}
}
};
canvas2d.init(canvas, mirror);
return canvas2d;
}
18.7 Mirroring WebGL canvas¶
const CanvasWebGl = function(canvas, mirror) {
const canvasWebGl = {
canvas: null,
api: null,
init: function(canvas, mirror) {
if (canvas) {
canvasWebGl.canvas = canvas;
let context = canvasWebGl.canvas.getContext(CANVAS_TYPE.CANVAS_WEBGL);
let vertexShaderSource = ...;
if (mirror) {
vertexShaderSource = `
attribute vec2 a_position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main() {
gl_Position = vec4(a_position, 0, 1);
v_texCoord = vec2(1.0 - a_texCoord.x, a_texCoord.y); // X axis mirroring
}
`;
}
...
const texture = context.createTexture();
...
context.pixelStorei(context.UNPACK_FLIP_Y_WEBGL, true);
...
canvasWebGl.api = {
context: context,
program: program,
positionBuffer: positionBuffer,
posLoc: posLoc,
texCoordBuffer: texCoordBuffer,
texLoc: texLoc,
texture: texture,
uTexLoc: uTexLoc
};
}
},
close: function() {
...
},
drawFrame: function(source) {
...
}
};
canvasWebGl.init(canvas, mirror);
return canvasWebGl;
}
18.8 Drawing a frame on WebGL canvas¶
const CanvasWebGl = function(canvas, mirror) {
const canvasWebGl = {
canvas: null,
api: null,
init: function(canvas, mirror) {
...
},
close: function() {
...
},
drawFrame: function(source) {
if (source && canvasWebGl.api && canvasWebGl.api.context) {
let context = canvasWebGl.api.context;
context.viewport(0, 0, canvasWebGl.canvas.width, canvasWebGl.canvas.height);
context.clear(context.COLOR_BUFFER_BIT);
context.useProgram(canvasWebGl.api.program);
// Position
context.bindBuffer(context.ARRAY_BUFFER, canvasWebGl.api.positionBuffer);
context.enableVertexAttribArray(canvasWebGl.api.posLoc);
context.vertexAttribPointer(canvasWebGl.api.posLoc, 2, context.FLOAT, false, 0, 0);
// Texture coordinates
context.bindBuffer(context.ARRAY_BUFFER, canvasWebGl.api.texCoordBuffer);
context.enableVertexAttribArray(canvasWebGl.api.texLoc);
context.vertexAttribPointer(canvasWebGl.api.texLoc, 2, context.FLOAT, false, 0, 0);
// Renew texture from source
context.bindTexture(context.TEXTURE_2D, canvasWebGl.api.texture);
context.texImage2D(
context.TEXTURE_2D, 0, context.RGBA, context.RGBA,
context.UNSIGNED_BYTE, source
);
context.uniform1i(canvasWebGl.api.uTexLoc, 0);
context.drawArrays(context.TRIANGLES, 0, 6);
}
}
};
canvasWebGl.init(canvas, mirror);
return canvasWebGl;
}
19. Video capture element operations¶
const Video = function(canvas) {
const videoObject = {
canvas: null,
video: null,
init: function(canvas) {
...
},
start: function(constraints) {
...
},
stop: function() {
...
}
};
videoObject.init(canvas);
return videoObject;
}
19.1 Video element creation¶
const Video = function(canvas) {
const videoObject = {
canvas: null,
video: null,
init: function(canvas) {
videoObject.canvas = canvas;
videoObject.video = document.createElement("video");
videoObject.video.setAttribute("playsinline", "");
videoObject.video.setAttribute("webkit-playsinline", "");
videoObject.video.muted = true;
videoObject.video.addEventListener("play", () => {
videoObject.canvas.loop(videoObject.video);
}, 0);
},
start: function(constraints) {
...
},
stop: function() {
...
}
};
videoObject.init(canvas);
return videoObject;
}
19.2 Video capture starting¶
const Video = function(canvas) {
const videoObject = {
canvas: null,
video: null,
init: function(canvas) {
...
},
start: function(constraints) {
let hasVideo = false;
let hasAudio = false;
let canvasStream = videoObject.canvas.canvasStream();
if (constraints.video) {
hasVideo = true;
}
if (constraints.audio) {
hasAudio = true;
}
navigator.mediaDevices.getUserMedia(constraints)
.then((stream) => {
videoObject.video.srcObject = stream;
videoObject.video.onloadedmetadata = () => {
if (!hasVideo) {
// video should not be sent: drop the video track captured from the canvas
canvasStream.removeTrack(canvasStream.getVideoTracks()[0]);
}
if (hasAudio) {
videoObject.video.muted = false;
try {
// route the mock video element audio through Web Audio API
// and add the resulting audio track to the canvas stream
let audioContext = new (window.AudioContext || window.webkitAudioContext)();
let source = audioContext.createMediaElementSource(videoObject.video);
let destination = audioContext.createMediaStreamDestination();
source.connect(destination);
canvasStream.addTrack(destination.stream.getAudioTracks()[0]);
} catch (e) {
console.warn("Failed to create audio context");
}
}
};
// playing the video element triggers the canvas drawing loop (see 19.1)
videoObject.video.play();
});
},
stop: function() {
...
}
};
videoObject.init(canvas);
return videoObject;
}
19.3 Video capture stopping¶
const Video = function(canvas) {
const videoObject = {
canvas: null,
video: null,
init: function(canvas) {
...
},
start: function(constraints) {
...
},
stop: function() {
if (videoObject.video) {
videoObject.video.pause();
videoObject.video.removeEventListener('play', null);
let tracks = videoObject.video.srcObject.getTracks();
for (let i = 0; i < tracks.length; i++) {
tracks[i].stop();
}
videoObject.video.srcObject = null;
videoObject.video = null;
videoObject.canvas = null;
}
}
};
videoObject.init(canvas);
return videoObject;
}