Skip to content

Instantly share code, notes, and snippets.

@greggman
Last active December 2, 2025 00:00
Show Gist options
  • Select an option

  • Save greggman/26f80b162a04a847c0a1ecd77ee21f40 to your computer and use it in GitHub Desktop.

Select an option

Save greggman/26f80b162a04a847c0a1ecd77ee21f40 to your computer and use it in GitHub Desktop.
WebGPU copyExternalImageToTexture video
/*bug-in-github-api-content-can-not-be-empty*/
<canvas></canvas>
/**
 * WebGPU demo: streams a video element into a GPUTexture via
 * copyExternalImageToTexture and draws it full-canvas every frame.
 * Requires a browser with WebGPU support; shows an alert (via fail) otherwise.
 */
async function main() {
  const adapter = await navigator.gpu?.requestAdapter();
  const device = await adapter?.requestDevice();
  if (!device) {
    fail('need a browser that supports WebGPU');
    return;
  }

  const canvas = document.querySelector('canvas');
  const context = canvas.getContext('webgpu');
  const preferredFormat = navigator.gpu.getPreferredCanvasFormat();
  context.configure({
    format: preferredFormat,
    device,
  });

  // [width, height] for either a video element (videoWidth/videoHeight)
  // or any other image source (width/height).
  function getSourceSize(source) {
    return [
      source.videoWidth || source.width,
      source.videoHeight || source.height,
    ];
  }

  // Copies one frame of `source` (video, image, canvas, ...) into `texture`.
  function copySourceToTexture(device, texture, source, {flipY} = {}) {
    device.queue.copyExternalImageToTexture(
      { source, flipY, },
      { texture },
      getSourceSize(source),
    );
  }

  // Creates an rgba8unorm texture sized to `source` and uploads the
  // current frame into it.
  function createTextureFromSource(device, source, options = {}) {
    const size = getSourceSize(source);
    const texture = device.createTexture({
      format: 'rgba8unorm',
      // NOTE(review): numMipLevels is not defined anywhere in this file;
      // passing {mips: true} would throw a ReferenceError. It is never
      // passed in this demo, so the branch is effectively dead.
      mipLevelCount: options.mips ? numMipLevels(...size) : 1,
      size,
      usage: GPUTextureUsage.TEXTURE_BINDING |
             GPUTextureUsage.COPY_DST |
             GPUTextureUsage.RENDER_ATTACHMENT,
    });
    copySourceToTexture(device, texture, source, options);
    return texture;
  }

  const video = document.createElement('video');
  video.muted = true;          // muted is required for autoplay without a gesture
  video.loop = true;
  video.preload = 'auto';
  video.crossOrigin = '';      // needed so the cross-origin frames are usable as a texture source
  video.src = 'https://webgpufundamentals.org/webgpu/resources/videos/Golden_retriever_swimming_the_doggy_paddle-360-no-audio.webm';
  await video.play();

  let texture;
  try {
    texture = createTextureFromSource(device, video);
    console.log('started');
  } catch (e) {
    console.error(e);
  }

  const module = device.createShaderModule({
    code: `
@vertex fn vs(@builtin(vertex_index) vNdx: u32) -> @builtin(position) vec4f {
let pos = array(vec2f(-1, 3), vec2f(3, -1), vec2f(-1, -1));
return vec4f(pos[vNdx], 0, 1);
}
@group(0) @binding(0) var tex: texture_2d<f32>;
@group(0) @binding(1) var smp: sampler;
@fragment fn fs(@builtin(position) pos: vec4f) -> @location(0) vec4f {
let uv = pos.xy / vec2f(300, 150);
return textureSample(tex, smp, uv);
}
`,
  });

  const pipeline = device.createRenderPipeline({
    layout: 'auto',
    vertex: { module },
    fragment: { module, targets: [{ format: preferredFormat }]},
  });

  const bindGroup = device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: texture.createView() },
      { binding: 1, resource: device.createSampler() },
    ],
  });

  // Prefer requestVideoFrameCallback so we copy only when the video actually
  // produces a new frame; fall back to requestAnimationFrame elsewhere.
  // FIX: the original spelled it `requestVideFrameCallback` (missing the
  // "o"), so the feature test always failed and the rVFC path never ran.
  const requestVideoFrame = video.requestVideoFrameCallback
    ? video.requestVideoFrameCallback.bind(video)
    : (cb) => requestAnimationFrame(cb);

  // Keeps the texture up to date with the playing video.
  function updateVideo() {
    copySourceToTexture(device, texture, video);
    requestVideoFrame(updateVideo);
  }
  requestVideoFrame(updateVideo);

  // Draws a single full-screen triangle sampling the video texture.
  function render() {
    const encoder = device.createCommandEncoder();
    const pass = encoder.beginRenderPass({
      colorAttachments: [
        {
          view: context.getCurrentTexture().createView(),
          loadOp: 'clear',
          storeOp: 'store',
        },
      ],
    });
    pass.setPipeline(pipeline);
    pass.setBindGroup(0, bindGroup);
    pass.draw(3);  // 3 vertices = one clip-space-covering triangle
    pass.end();
    device.queue.submit([encoder.finish()]);
    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);
}
/**
 * Reports a fatal startup problem to the user.
 * @param {string} msg - human-readable error message
 */
const fail = (msg) => {
  // eslint-disable-next-line no-alert
  alert(msg);
};

// Kick off the demo.
main();
{"name":"WebGPU copyExternalImageToTexture video","settings":{},"filenames":["index.html","index.css","index.js"]}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment