feat: VideoProcessors v3.0 (#2053)
* Refactor video frame capture logic to its own module.
* Use insertable streams where supported.
* Support inputFrameBufferType=videoframe.
* Support outputFrameBufferContextType=bitmaprenderer.
* Support outputFrameBufferContextType=bitmaprenderer only for chrome.
* Work around: markdown-it/linkify-it#111

* VBLOCKS-3643 VBLOCKS-3644

* docs: introduce beta support for Video Processor V3

---------

Co-authored-by: twilio-ci <[email protected]>
Co-authored-by: Luis Rivas <[email protected]>
3 people authored Dec 10, 2024
1 parent 281ae62 commit b9d0b5d
Showing 11 changed files with 228 additions and 66 deletions.
14 changes: 13 additions & 1 deletion CHANGELOG.md
@@ -2,8 +2,20 @@ The Twilio Programmable Video SDKs use [Semantic Versioning](http://www.semver.o

**Version 1.x reached End of Life on September 8th, 2021.** See the changelog entry [here](https://www.twilio.com/changelog/end-of-life-complete-for-unsupported-versions-of-the-programmable-video-sdk). Support for the 1.x version ended on December 4th, 2020.

2.29.0 (December 5, 2024)
================================

Changes
-------

### Video Processor V3 support (Beta)
- `AddProcessorOptions.inputFrameBufferType` now has a new value, `videoframe`. On browsers that support [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame), the `inputFrameBuffer` argument of [`VideoProcessor.processFrame()`](https://twilio.github.io/twilio-video-processors.js/classes/VirtualBackgroundProcessor.html#processFrame) will be a `VideoFrame`. On other supported browsers, it will be an `HTMLVideoElement`.
- `AddProcessorOptions.outputFrameBufferContextType` now has a new value, `bitmaprenderer`. Currently, this is **supported only on Chromium-based browsers**; on other supported browsers, it falls back to `2d`. Support for `bitmaprenderer` on Safari and Firefox is tracked in VBLOCKS-3643 and VBLOCKS-3644.
- Patched the build script to work around the issue: https://github.com/markdown-it/linkify-it/issues/111.

2.28.2 (November 22, 2024)
========================
==========================

Bug Fixes
---------
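Illustrative usage (not part of this diff): a minimal sketch of how an application might opt into the two new `AddProcessorOptions` values described in the 2.29.0 entry above. Only `createLocalVideoTrack`, `addProcessor`, and the documented option keys come from the SDK; the pass-through processor and frame-ownership handling below are placeholder assumptions.

```js
// Hypothetical sketch: opt into the Video Processor V3 (Beta) options.
const { createLocalVideoTrack } = require('twilio-video');

// Placeholder processor that mirrors the input frame to the output canvas.
const passThroughProcessor = {
  async processFrame(inputFrameBuffer, outputFrameBuffer) {
    // createImageBitmap accepts VideoFrame, HTMLVideoElement and canvas sources.
    const bitmap = await createImageBitmap(inputFrameBuffer);
    if (typeof VideoFrame !== 'undefined' && inputFrameBuffer instanceof VideoFrame) {
      inputFrameBuffer.close(); // assumption: the processor releases the frame once copied
    }
    outputFrameBuffer.getContext('bitmaprenderer').transferFromImageBitmap(bitmap);
  }
};

createLocalVideoTrack().then(track => {
  track.addProcessor(passThroughProcessor, {
    inputFrameBufferType: 'videoframe',            // VideoFrame where supported, else HTMLVideoElement
    outputFrameBufferContextType: 'bitmaprenderer' // Chromium-only for now; a real processor should also handle the '2d' fallback
  });
});
```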
2 changes: 1 addition & 1 deletion LICENSE.md
@@ -1,7 +1,7 @@
The following license applies to all parts of this software except as
documented below.

Copyright (C) 2019-2022 Twilio, inc.
Copyright (C) 2019-2025 Twilio, inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
2 changes: 1 addition & 1 deletion README.md
@@ -74,7 +74,7 @@ Releases of twilio-video.js are hosted on a CDN, and you can include these
directly in your web app using a &lt;script&gt; tag.

```html
<script src="//sdk.twilio.com/js/video/releases/2.28.2/twilio-video.min.js"></script>
<script src="//sdk.twilio.com/js/video/releases/2.29.0/twilio-video.min.js"></script>
```

Using this method, twilio-video.js will set a browser global:
68 changes: 68 additions & 0 deletions lib/media/track/capturevideoframes.js
@@ -0,0 +1,68 @@
/* globals MediaStreamTrackGenerator, MediaStreamTrackProcessor, TransformStream */
'use strict';

const { DEFAULT_FRAME_RATE } = require('../../util/constants');

// Fallback strategy: poll at the track's frame rate using setInterval.
function captureVideoFramesSetInterval(videoEl, processVideoFrame) {
const [track] = videoEl.srcObject.getVideoTracks();
const { frameRate = DEFAULT_FRAME_RATE } = track.getSettings();
let sampleInterval;

const readable = new ReadableStream({
start(controller) {
sampleInterval = setInterval(
() => controller.enqueue(),
1000 / frameRate
);
}
});

const transformer = new TransformStream({
transform() {
return processVideoFrame();
}
});

readable
.pipeThrough(transformer)
.pipeTo(new WritableStream())
.then(() => { /* noop */ });

return () => {
clearInterval(sampleInterval);
};
}

// Preferred strategy: drive processing from real frames via insertable streams.
function captureVideoFramesInsertableStreams(videoEl, processVideoFrame, videoFrameType) {
const [track] = videoEl.srcObject.getVideoTracks();
const { readable } = new MediaStreamTrackProcessor({ track });
const generator = new MediaStreamTrackGenerator({ kind: 'video' });
let shouldStop = false;

const transformer = new TransformStream({
transform(videoFrame, controller) {
const promise = videoFrameType === 'videoframe'
? processVideoFrame(videoFrame)
: Promise.resolve(videoFrame.close())
.then(processVideoFrame);
return promise.finally(() => {
if (shouldStop) {
controller.terminate();
}
});
}
});

readable
.pipeThrough(transformer)
.pipeTo(generator.writable)
.then(() => { /* noop */ });

return () => {
shouldStop = true;
};
}

module.exports = typeof MediaStreamTrackGenerator === 'function' && typeof MediaStreamTrackProcessor === 'function'
? captureVideoFramesInsertableStreams
: captureVideoFramesSetInterval;
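For orientation (not part of the commit), here is a hedged sketch of how a caller could exercise this module's contract. In the SDK it is wired up by videotrack.js; the element handling and logging below are illustrative.

```js
// Hypothetical caller: videoEl.srcObject must be a MediaStream with a video track.
const captureVideoFrames = require('./capturevideoframes');

function startCapture(videoEl) {
  const stopCapture = captureVideoFrames(
    videoEl,
    videoFrame => {
      // On the insertable-streams path with 'videoframe', a VideoFrame is passed in;
      // otherwise the callback is invoked with no argument.
      if (videoFrame) {
        console.log('frame timestamp:', videoFrame.timestamp);
        videoFrame.close();
      }
      return Promise.resolve();
    },
    'videoframe' // inputFrameBufferType forwarded from AddProcessorOptions
  );

  // The returned function stops capture: clearInterval on the fallback path,
  // or terminating the TransformStream on the insertable-streams path.
  return stopCapture;
}
```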
87 changes: 52 additions & 35 deletions lib/media/track/videotrack.js
@@ -1,7 +1,9 @@
'use strict';

const MediaTrack = require('./mediatrack');
const captureVideoFrames = require('./capturevideoframes');
const VideoProcessorEventObserver = require('./videoprocessoreventobserver');
const { guessBrowser } = require('../../webrtc/util');
const { DEFAULT_FRAME_RATE } = require('../../util/constants');

/**
@@ -34,10 +36,6 @@ class VideoTrack extends MediaTrack {
constructor(mediaTrackTransceiver, options) {
super(mediaTrackTransceiver, options);
Object.defineProperties(this, {
_captureTimeoutId: {
value: null,
writable: true
},
_isCapturing: {
value: false,
writable: true
@@ -58,6 +56,10 @@ class VideoTrack extends MediaTrack {
value: {},
writable: true,
},
_stopCapture: {
value: () => {},
writable: true
},
_unmuteHandler: {
value: null,
writable: true
@@ -129,30 +131,21 @@ class VideoTrack extends MediaTrack {
this._processorEventObserver.emit('start');
this._log.debug('Start capturing frames');

let startTime = Date.now();
let processFramePeriodMs;
const { inputFrameBufferType } = this._processorOptions;

this._dummyEl.play().then(() => {
const captureFrame = cb => {
clearTimeout(this._captureTimeoutId);
const { frameRate = DEFAULT_FRAME_RATE } = this.mediaStreamTrack.getSettings();
const capturePeriodMs = Math.floor(1000 / frameRate);
let delay = capturePeriodMs - processFramePeriodMs;
if (delay < 0 || typeof processFramePeriodMs !== 'number') {
delay = 0;
}
this._captureTimeoutId = setTimeout(cb, delay);
};
const process = () => {
const process = videoFrame => {
const checkResult = this._checkIfCanCaptureFrames();
if (!checkResult.canCaptureFrames) {
if (videoFrame) {
videoFrame.close();
}
this._isCapturing = false;
this._stopCapture();
this._processorEventObserver.emit('stop', checkResult.message);
this._log.debug('Cannot capture frames. Stopping capturing frames.');
return;
return Promise.resolve();
}
startTime = Date.now();

const { width = 0, height = 0 } = this.mediaStreamTrack.getSettings();
// Setting the canvas' dimension triggers a redraw.
// Only set it if it has changed.
@@ -165,32 +158,45 @@ class VideoTrack extends MediaTrack {
this._inputFrame.width = width;
this._inputFrame.height = height;
}
this._inputFrame.getContext('2d').drawImage(this._dummyEl, 0, 0, width, height);
this._inputFrame.getContext('2d').drawImage(
this._dummyEl,
0,
0,
width,
height
);
}

const input = videoFrame || (
['video', 'videoframe'].includes(inputFrameBufferType)
? this._dummyEl
: this._inputFrame
);
let result = null;

try {
const input = this._processorOptions.inputFrameBufferType === 'video' ? this._dummyEl : this._inputFrame;
result = this.processor.processFrame(input, this._outputFrame);
} catch (ex) {
this._log.debug('Exception detected after calling processFrame.', ex);
}
((result instanceof Promise) ? result : Promise.resolve(result))
return ((result instanceof Promise) ? result : Promise.resolve(result))
.then(() => {
if (this._outputFrame) {
if (typeof this.processedTrack.requestFrame === 'function') {
this.processedTrack.requestFrame();
}
this._processorEventObserver.emit('stats');
}
})
.finally(() => {
processFramePeriodMs = Date.now() - startTime;
captureFrame(process);
});
};
captureFrame(process);
}).catch(error => this._log.error('Video element cannot be played', { error, track: this }));
this._stopCapture = captureVideoFrames(
this._dummyEl,
process,
inputFrameBufferType
);
}).catch(error => this._log.error(
'Video element cannot be played',
{ error, track: this }
));
}

/**
@@ -295,7 +301,11 @@ class VideoTrack extends MediaTrack {
if (typeof OffscreenCanvas === 'undefined' && inputFrameBufferType === 'offscreencanvas') {
throw new Error('OffscreenCanvas is not supported by this browser.');
}
if (inputFrameBufferType && inputFrameBufferType !== 'video' && inputFrameBufferType !== 'canvas' && inputFrameBufferType !== 'offscreencanvas') {
if (inputFrameBufferType
&& inputFrameBufferType !== 'videoframe'
&& inputFrameBufferType !== 'video'
&& inputFrameBufferType !== 'canvas'
&& inputFrameBufferType !== 'offscreencanvas') {
throw new Error(`Invalid inputFrameBufferType of ${inputFrameBufferType}`);
}
if (!inputFrameBufferType) {
@@ -322,6 +332,7 @@ class VideoTrack extends MediaTrack {
// that the correct type is used and on Firefox, it throws an exception if you try to capture
// frames prior calling getContext https://bugzilla.mozilla.org/show_bug.cgi?id=1572422
outputFrameBufferContextType = outputFrameBufferContextType || '2d';

const ctx = this._outputFrame.getContext(outputFrameBufferContextType);
if (!ctx) {
throw new Error(`Cannot get outputFrameBufferContextType: ${outputFrameBufferContextType}.`);
@@ -484,7 +495,8 @@ class VideoTrack extends MediaTrack {

this._processorEventObserver.emit('remove');
this._log.debug('Removing VideoProcessor from the VideoTrack', processor);
clearTimeout(this._captureTimeoutId);
this._stopCapture();
this._stopCapture = () => {};
this.mediaStreamTrack.removeEventListener('unmute', this._unmuteHandler);
this._processorOptions = {};
this._unmuteHandler = null;
@@ -529,7 +541,7 @@ function dimensionsChanged(track, elem) {
* Any exception raised (either synchronously or asynchronously) in `processFrame` will result in the frame being dropped.
* This callback has the following signature:<br/><br/>
* <code>processFrame(</code><br/>
* &nbsp;&nbsp;<code>inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement,</code><br/>
* &nbsp;&nbsp;<code>inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement | VideoFrame,</code><br/>
* &nbsp;&nbsp;<code>outputFrameBuffer: HTMLCanvasElement</code><br/>
* <code>): Promise&lt;void&gt; | void;</code>
*
@@ -554,6 +566,10 @@ function dimensionsChanged(track, elem) {
* Possible values include the following.
* <br/>
* <br/>
* `videoframe` - Your Video Processor will receive a [VideoFrame](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame).
* On browsers that do not support `VideoFrame`, it will receive an [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) instead.
* <br/>
* <br/>
* `offscreencanvas` - Your Video Processor will receive an [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas)
* which is good for canvas-related processing that can be rendered off screen.
* <br/>
@@ -567,8 +583,9 @@ function dimensionsChanged(track, elem) {
* the frame directly to your output canvas.
* @property {string} [outputFrameBufferContextType="2d"] - The SDK needs the [context type](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/getContext)
* that your Video Processor uses in order to properly generate the processed track. For example, if your Video Processor uses WebGL2 (`canvas.getContext('webgl2')`),
* you should set `outputFrameBufferContextType` to `webgl2`. Or if you're using Canvas 2D processing (`canvas.getContext('2d')`),
* you should set `outputFrameBufferContextType` to `2d`.
* you should set `outputFrameBufferContextType` to `webgl2`. If you're using Canvas 2D processing (`canvas.getContext('2d')`),
* you should set `outputFrameBufferContextType` to `2d`. If the output frame is an [ImageBitmap](https://developer.mozilla.org/en-US/docs/Web/API/ImageBitmap),
* you should set `outputFrameBufferContextType` to `bitmaprenderer`.
*/

/**
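Complementing the JSDoc above (illustrative only, not part of the diff): a processor that uses the portable `2d` output context and declares it via `outputFrameBufferContextType`, so the SDK requests the matching context when generating the processed track. Everything except `addProcessor` and the documented option keys is placeholder code.

```js
// Hypothetical grayscale processor using the '2d' output context.
const grayscaleProcessor = {
  processFrame(inputFrameBuffer, outputFrameBuffer) {
    const ctx = outputFrameBuffer.getContext('2d');
    ctx.filter = 'grayscale(100%)'; // 2D canvas filters; support varies by browser
    ctx.drawImage(inputFrameBuffer, 0, 0, outputFrameBuffer.width, outputFrameBuffer.height);
  }
};

function addGrayscale(videoTrack) {
  videoTrack.addProcessor(grayscaleProcessor, {
    inputFrameBufferType: 'offscreencanvas', // processFrame receives an OffscreenCanvas
    outputFrameBufferContextType: '2d'       // must match the context used in processFrame
  });
}
```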
2 changes: 1 addition & 1 deletion package.json
@@ -2,7 +2,7 @@
"name": "twilio-video",
"title": "Twilio Video",
"description": "Twilio Video JavaScript Library",
"version": "2.28.3-dev",
"version": "2.29.1-dev",
"homepage": "https://twilio.com",
"author": "Mark Andrus Roberts <[email protected]>",
"contributors": [
53 changes: 53 additions & 0 deletions test/lib/mockstreams.js
@@ -0,0 +1,53 @@
'use strict';

class ReadableStream {
constructor({ start }) {
this._transformStream = null;
this._writableStream = null;
setTimeout(() => start({
enqueue: arg => {
if (this._transformStream) {
this._transformStream.transform(arg, {
enqueue: arg => {
if (this._writableStream) {
this._writableStream.write(arg);
}
}
});
}
}
}));
}

pipeThrough(transformStream) {
this._transformStream = transformStream;
return this;
}

pipeTo(writableStream) {
this._writableStream = writableStream;
return Promise.resolve();
}
}

class TransformStream {
constructor({ transform }) {
this.transform = transform;
}
}

class WritableStream {
constructor() {
this.write = () => {};
}
}

function mockStreams(_global) {
_global = _global || global;
_global.ReadableStream = ReadableStream;
_global.TransformStream = TransformStream;
_global.WritableStream = WritableStream;
return _global;
}

module.exports = mockStreams;
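A hedged sketch (not part of the diff) of how these mocks might be exercised in a Node-based unit test; the test names are illustrative. Note that `MediaStreamTrackProcessor`/`MediaStreamTrackGenerator` are not mocked here, so requiring capturevideoframes.js would still select its setInterval fallback.

```js
// Illustrative mocha-style test that installs the stream mocks on `global`.
const assert = require('assert');
const mockStreams = require('../lib/mockstreams');

describe('mockstreams', () => {
  before(() => {
    mockStreams(global);
  });

  it('installs the stream constructors on global', () => {
    assert.strictEqual(typeof global.ReadableStream, 'function');
    assert.strictEqual(typeof global.TransformStream, 'function');
    assert.strictEqual(typeof global.WritableStream, 'function');
  });

  it('drives enqueued values through pipeThrough/pipeTo', done => {
    const readable = new global.ReadableStream({
      start(controller) { controller.enqueue('frame'); }
    });
    const transformer = new global.TransformStream({
      transform(chunk, controller) { controller.enqueue(chunk); }
    });
    const writable = new global.WritableStream();
    writable.write = chunk => {
      assert.strictEqual(chunk, 'frame');
      done();
    };
    readable.pipeThrough(transformer).pipeTo(writable);
  });
});
```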
1 change: 1 addition & 0 deletions test/unit/index.js
@@ -1,6 +1,7 @@
'use strict';

if (typeof window === 'undefined') {
require('../lib/mockstreams')();
require('../lib/mockwebrtc')();
}
