Introduce VideoFrameTexture. (#30270)
* VideoTexture: Introduce `setFrame()`.

* Clean up.

* Examples: More clean up.

* Lib: Add source of demuxer_mp4.js.

* Update VideoTexture.html

* Introduce `VideoFrameTexture`.

* VideoTexture: Clean up.

* VideoFrameTexture: More clean up.

* VideoTexture: Clean up.

* VideoFrameTexture: Improve comment.
Mugen87 authored Jan 8, 2025
1 parent 73eddcb commit 48df2ee
Showing 11 changed files with 364 additions and 2 deletions.
3 changes: 2 additions & 1 deletion .eslintrc.json
@@ -41,7 +41,8 @@
"CodeMirror": "readonly",
"esprima": "readonly",
"jsonlint": "readonly",
"VideoFrame": "readonly"
"VideoFrame": "readonly",
"VideoDecoder": "readonly"
},
"rules": {
"no-throw-literal": [
98 changes: 98 additions & 0 deletions docs/api/en/textures/VideoFrameTexture.html
@@ -0,0 +1,98 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<base href="../../../" />
<script src="page.js"></script>
<link type="text/css" rel="stylesheet" href="page.css" />
</head>
<body>
[page:VideoTexture] &rarr;

<h1>[name]</h1>

<p class="desc">
This class can be used as an alternative way to define video data. Instead of using
an instance of `HTMLVideoElement` as `VideoTexture` does, [name] expects each frame to be
defined manually via [page:.setFrame setFrame](). A typical use case for this module is when
video frames are decoded with the WebCodecs API.
</p>

<h2>Code Example</h2>

<code>
const texture = new THREE.VideoFrameTexture();
texture.setFrame( frame );
</code>
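
<p>
A fuller sketch, assuming frames come from a WebCodecs `VideoDecoder` that is
configured elsewhere (the identifiers here are illustrative):
</p>

<code>
const texture = new THREE.VideoFrameTexture();

const decoder = new VideoDecoder( {
	output: ( frame ) => texture.setFrame( frame ),
	error: ( e ) => console.error( 'VideoDecoder:', e )
} );
</code>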

<h2>Examples</h2>

<p>
[example:webgpu_video_frame video / frame]
</p>

<h2>Constructor</h2>
<h3>
[name]( [param:Constant mapping], [param:Constant wrapS],
[param:Constant wrapT], [param:Constant magFilter], [param:Constant minFilter],
[param:Constant format], [param:Constant type], [param:Number anisotropy] )
</h3>
<p>
[page:Constant mapping] -- How the image is applied to the object. The
default is [page:Textures THREE.UVMapping].
See [page:Textures mapping constants] for other choices.<br />

[page:Constant wrapS] -- The default is [page:Textures THREE.ClampToEdgeWrapping].
See [page:Textures wrap mode constants] for
other choices.<br />

[page:Constant wrapT] -- The default is [page:Textures THREE.ClampToEdgeWrapping].
See [page:Textures wrap mode constants] for
other choices.<br />

[page:Constant magFilter] -- How the texture is sampled when a texel
covers more than one pixel. The default is [page:Textures THREE.LinearFilter].
See [page:Textures magnification filter constants]
for other choices.<br />

[page:Constant minFilter] -- How the texture is sampled when a texel
covers less than one pixel. The default is [page:Textures THREE.LinearFilter].
See [page:Textures minification filter constants] for
other choices.<br />

[page:Constant format] -- The default is [page:Textures THREE.RGBAFormat].
See [page:Textures format constants] for other choices.<br />

[page:Constant type] -- The default is [page:Textures THREE.UnsignedByteType].
See [page:Textures type constants] for other choices.<br />

[page:Number anisotropy] -- The number of samples taken along the axis
through the pixel that has the highest density of texels. By default, this
value is `1`. A higher value gives a less blurry result than a basic mipmap,
at the cost of more texture samples being used. Use
[page:WebGLRenderer.getMaxAnisotropy renderer.getMaxAnisotropy]() to find
the maximum valid anisotropy value for the GPU; this value is usually a
power of 2.<br /><br />
</p>

<h2>Properties</h2>

<p>See the base [page:VideoTexture VideoTexture] class for common properties.</p>

<h2>Methods</h2>

<p>See the base [page:VideoTexture VideoTexture] class for common methods.</p>

<h3>[method:undefined setFrame]( [param:VideoFrame frame] )</h3>
<p>
Sets the current frame of the video. This will automatically update the texture
so the data can be used for rendering.
</p>
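
<p>
A usage sketch (assuming `frame` is a WebCodecs `VideoFrame` obtained elsewhere,
e.g. from a `VideoDecoder` output callback):
</p>

<code>
texture.setFrame( frame );
// no manual `texture.needsUpdate = true` is required; the texture
// updates itself so the new frame is used on the next render
</code>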

<h2>Source</h2>

<p>
[link:https://github.com/mrdoob/three.js/blob/master/src/[path].js src/[path].js]
</p>
</body>
</html>
1 change: 1 addition & 0 deletions docs/list.json
@@ -318,6 +318,7 @@
"FramebufferTexture": "api/en/textures/FramebufferTexture",
"Source": "api/en/textures/Source",
"Texture": "api/en/textures/Texture",
"VideoFrameTexture": "api/en/textures/VideoFrameTexture",
"VideoTexture": "api/en/textures/VideoTexture"
}

1 change: 1 addition & 0 deletions examples/files.json
@@ -443,6 +443,7 @@
"webgpu_tsl_vfx_flames",
"webgpu_tsl_vfx_linkedparticles",
"webgpu_tsl_vfx_tornado",
"webgpu_video_frame",
"webgpu_video_panorama",
"webgpu_volume_cloud",
"webgpu_volume_perlin",
109 changes: 109 additions & 0 deletions examples/jsm/libs/demuxer_mp4.js
@@ -0,0 +1,109 @@
import MP4Box from 'https://cdn.jsdelivr.net/npm/[email protected]/+esm';

// From: https://w3c.github.io/webcodecs/samples/video-decode-display/

// Wraps an MP4Box File as a WritableStream underlying sink.
class MP4FileSink {
#setStatus = null;
#file = null;
#offset = 0;

constructor(file, setStatus) {
this.#file = file;
this.#setStatus = setStatus;
}

write(chunk) {
// MP4Box.js requires buffers to be ArrayBuffers, but we have a Uint8Array.
const buffer = new ArrayBuffer(chunk.byteLength);
new Uint8Array(buffer).set(chunk);

// Inform MP4Box where in the file this chunk is from.
buffer.fileStart = this.#offset;
this.#offset += buffer.byteLength;

// Append chunk.
this.#setStatus("fetch", (this.#offset / (1024 ** 2)).toFixed(1) + " MiB");
this.#file.appendBuffer(buffer);
}

close() {
this.#setStatus("fetch", "Done");
this.#file.flush();
}
}

// Demuxes the first video track of an MP4 file using MP4Box, calling
// `onConfig()` and `onChunk()` with appropriate WebCodecs objects.
export class MP4Demuxer {
#onConfig = null;
#onChunk = null;
#setStatus = null;
#file = null;

constructor(uri, {onConfig, onChunk, setStatus}) {
this.#onConfig = onConfig;
this.#onChunk = onChunk;
this.#setStatus = setStatus;

// Configure an MP4Box File for demuxing.
this.#file = MP4Box.createFile();
this.#file.onError = error => setStatus("demux", error);
this.#file.onReady = this.#onReady.bind(this);
this.#file.onSamples = this.#onSamples.bind(this);

// Fetch the file and pipe the data through.
const fileSink = new MP4FileSink(this.#file, setStatus);
fetch(uri).then(response => {
// highWaterMark should be large enough for smooth streaming, but lower is
// better for memory usage.
response.body.pipeTo(new WritableStream(fileSink, {highWaterMark: 2}));
});
}

// Get the appropriate `description` for a specific track. Assumes that the
// track is H.264, H.265, VP8, VP9, or AV1.
#description(track) {
const trak = this.#file.getTrackById(track.id);
for (const entry of trak.mdia.minf.stbl.stsd.entries) {
const box = entry.avcC || entry.hvcC || entry.vpcC || entry.av1C;
if (box) {
const stream = new MP4Box.DataStream(undefined, 0, MP4Box.DataStream.BIG_ENDIAN);
box.write(stream);
return new Uint8Array(stream.buffer, 8); // Remove the box header (the 4-byte size and 4-byte type fields).
}
}
throw new Error("avcC, hvcC, vpcC, or av1C box not found");
}

#onReady(info) {
this.#setStatus("demux", "Ready");
const track = info.videoTracks[0];

// Generate and emit an appropriate VideoDecoderConfig.
this.#onConfig({
// Browsers don't support parsing the full vp8 codec string (e.g. `vp08.00.41.08`);
// they only support `vp8`.
codec: track.codec.startsWith('vp08') ? 'vp8' : track.codec,
codedHeight: track.video.height,
codedWidth: track.video.width,
description: this.#description(track),
});

// Start demuxing.
this.#file.setExtractionOptions(track.id);
this.#file.start();
}

#onSamples(track_id, ref, samples) {
// Generate and emit an EncodedVideoChunk for each demuxed sample.
for (const sample of samples) {
this.#onChunk(new EncodedVideoChunk({
type: sample.is_sync ? "key" : "delta",
timestamp: 1e6 * sample.cts / sample.timescale,
duration: 1e6 * sample.duration / sample.timescale,
data: sample.data
}));
}
}
}
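
A minimal usage sketch of this demuxer, assuming a WebCodecs `VideoDecoder` as the consumer and an illustrative video URI (this mirrors the `webgpu_video_frame` example below):

const decoder = new VideoDecoder({
  output: (frame) => { /* consume the VideoFrame, e.g. upload it to a texture */ },
  error: (e) => console.error("VideoDecoder:", e),
});

new MP4Demuxer("./video.mp4", {
  onConfig: (config) => decoder.configure(config),
  onChunk: (chunk) => decoder.decode(chunk),
  setStatus: (type, message) => console.info(type, message),
});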
Binary file added examples/screenshots/webgpu_video_frame.jpg
3 changes: 2 additions & 1 deletion examples/tags.json
@@ -149,5 +149,6 @@
"webgpu_sky": [ "sun" ],
"webgpu_tonemapping": [ "gltf" ],
"webgpu_tsl_compute_attractors_particles": [ "gpgpu" ],
"webgpu_ocean": [ "water" ]
"webgpu_ocean": [ "water" ],
"webgpu_video_frame": [ "webcodecs" ]
}
116 changes: 116 additions & 0 deletions examples/webgpu_video_frame.html
@@ -0,0 +1,116 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgpu - video frames</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<link type="text/css" rel="stylesheet" href="main.css">
</head>
<body>
<div id="info">
<a href="https://threejs.org" target="_blank" rel="noopener">three.js</a> - video - frames<br/>
Decodes all frames from an MP4 file and renders them onto a plane as fast as possible.<br/>
<a href="https://github.com/gpac/mp4box.js/" target="_blank" rel="noopener">mp4box.js</a> is used for MP4 parsing.
</div>

<script type="importmap">
{
"imports": {
"three": "../build/three.webgpu.js",
"three/webgpu": "../build/three.webgpu.js",
"three/tsl": "../build/three.tsl.js",
"three/addons/": "./jsm/"
}
}
</script>

<script type="module">

import * as THREE from 'three';

import { MP4Demuxer } from 'three/addons/libs/demuxer_mp4.js';

let camera, scene, renderer;

init();

function init() {

camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, .25, 10 );
camera.position.set( 0, 0, 1 );

scene = new THREE.Scene();

const geometry = new THREE.PlaneGeometry();

const videoTexture = new THREE.VideoFrameTexture();
videoTexture.colorSpace = THREE.SRGBColorSpace;

// eslint-disable-next-line compat/compat
const decoder = new VideoDecoder( {
output( frame ) {

videoTexture.setFrame( frame );

},
error( e ) {

console.error( 'VideoDecoder:', e );

}
} );

new MP4Demuxer( './textures/sintel.mp4', {
onConfig( config ) {

decoder.configure( config );

},
onChunk( chunk ) {

decoder.decode( chunk );

},
setStatus( s ) {

console.info( 'MP4Demuxer:', s );

}
} );

const material = new THREE.MeshBasicMaterial( { map: videoTexture } );

const mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );

renderer = new THREE.WebGPURenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.setAnimationLoop( animate );
document.body.appendChild( renderer.domElement );

//

window.addEventListener( 'resize', onWindowResize );

}

function onWindowResize() {

camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();

renderer.setSize( window.innerWidth, window.innerHeight );

}

function animate() {

renderer.render( scene, camera );

}


</script>
</body>
</html>
1 change: 1 addition & 0 deletions src/Three.Core.js
@@ -21,6 +21,7 @@ export { Line } from './objects/Line.js';
export { Points } from './objects/Points.js';
export { Group } from './objects/Group.js';
export { VideoTexture } from './textures/VideoTexture.js';
export { VideoFrameTexture } from './textures/VideoFrameTexture.js';
export { FramebufferTexture } from './textures/FramebufferTexture.js';
export { Source } from './textures/Source.js';
export { DataTexture } from './textures/DataTexture.js';
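
With this export in place, the new class is importable from the main entry point. A minimal sketch:

import { SRGBColorSpace, VideoFrameTexture } from 'three';

const texture = new VideoFrameTexture();
texture.colorSpace = SRGBColorSpace;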