
feat(webcodecs): add a WebGL renderer
yume-chan committed Mar 12, 2024
1 parent c7549ad commit 28d650f
Showing 4 changed files with 160 additions and 36 deletions.
24 changes: 24 additions & 0 deletions libraries/scrcpy-decoder-webcodecs/src/bitmap.ts
@@ -0,0 +1,24 @@
import type { FrameRenderer } from "./renderer.js";

export class BitmapFrameRenderer implements FrameRenderer {
    #context: ImageBitmapRenderingContext;

    constructor(canvas: HTMLCanvasElement) {
        this.#context = canvas.getContext("bitmaprenderer", { alpha: false })!;
    }

    draw(frame: VideoFrame): void {
        createImageBitmap(frame)
            .then((bitmap) => {
                this.#context.transferFromImageBitmap(bitmap);
                frame.close();
            })
            .catch((e) => {
                console.warn(
                    "[@yume-chan/scrcpy-decoder-webcodecs]",
                    "VideoDecoder error",
                    e,
                );
            });
    }
}
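For context, a minimal sketch (not part of this commit) of how a renderer like this can be driven from a WebCodecs VideoDecoder output callback; the codec string is a placeholder and a browser with WebCodecs support is assumed:

// Illustrative only: assumes BitmapFrameRenderer (above) is in scope and the
// browser supports WebCodecs and ImageBitmapRenderingContext.
const canvas = document.createElement("canvas");
document.body.appendChild(canvas);

const renderer = new BitmapFrameRenderer(canvas);
const decoder = new VideoDecoder({
    // draw() converts the frame to an ImageBitmap asynchronously and closes it afterwards.
    output: (frame) => renderer.draw(frame),
    error: (e) => console.error(e),
});
// decoder.configure({ codec: "avc1.640028" }); // placeholder codec string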
68 changes: 32 additions & 36 deletions libraries/scrcpy-decoder-webcodecs/src/index.ts
@@ -14,6 +14,10 @@ import type {
 } from "@yume-chan/scrcpy-decoder-tinyh264";
 import { WritableStream } from "@yume-chan/stream-extra";
 
+import { BitmapFrameRenderer } from "./bitmap.js";
+import type { FrameRenderer } from "./renderer.js";
+import { WebGLFrameRenderer } from "./webgl.js";
+
 function toHex(value: number) {
     return value.toString(16).padStart(2, "0").toUpperCase();
 }
@@ -48,9 +52,9 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
         return this.#writable;
     }
 
-    #renderer: HTMLCanvasElement;
+    #canvas: HTMLCanvasElement;
     get renderer() {
-        return this.#renderer;
+        return this.#canvas;
     }
 
     #frameRendered = 0;
@@ -68,48 +72,40 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
         return this.#sizeChanged.event;
     }
 
-    #context: ImageBitmapRenderingContext;
     #decoder: VideoDecoder;
     #config: Uint8Array | undefined;
+    #renderer: FrameRenderer;
 
     #currentFrameRendered = false;
     #animationFrameId = 0;
 
     constructor(codec: ScrcpyVideoCodecId) {
         this.#codec = codec;
 
-        this.#renderer = document.createElement("canvas");
+        this.#canvas = document.createElement("canvas");
 
+        try {
+            this.#renderer = new WebGLFrameRenderer(this.#canvas);
+        } catch {
+            this.#renderer = new BitmapFrameRenderer(this.#canvas);
+        }
+
-        this.#context = this.#renderer.getContext("bitmaprenderer", {
-            alpha: false,
-        })!;
         this.#decoder = new VideoDecoder({
            output: (frame) => {
-                createImageBitmap(frame)
-                    .then((bitmap) => {
-                        if (this.#currentFrameRendered) {
-                            this.#frameRendered += 1;
-                        } else {
-                            this.#frameSkipped += 1;
-                        }
-                        this.#currentFrameRendered = false;
-
-                        // PERF: H.264 renderer may draw multiple frames in one vertical sync interval to minimize latency.
-                        // When multiple frames are drawn in one vertical sync interval,
-                        // only the last one is visible to users.
-                        // But this ensures users can always see the most up-to-date screen.
-                        // This is also the behavior of official Scrcpy client.
-                        // https://github.com/Genymobile/scrcpy/issues/3679
-                        this.#context.transferFromImageBitmap(bitmap);
-                        frame.close();
-                    })
-                    .catch((e) => {
-                        console.warn(
-                            "[@yume-chan/scrcpy-decoder-webcodecs]",
-                            "createImageBitmap error",
-                            e,
-                        );
-                    });
+                if (this.#currentFrameRendered) {
+                    this.#frameRendered += 1;
+                } else {
+                    this.#frameSkipped += 1;
+                }
+                this.#currentFrameRendered = false;
+
+                // PERF: H.264 renderer may draw multiple frames in one vertical sync interval to minimize latency.
+                // When multiple frames are drawn in one vertical sync interval,
+                // only the last one is visible to users.
+                // But this ensures users can always see the most up-to-date screen.
+                // This is also the behavior of official Scrcpy client.
+                // https://github.com/Genymobile/scrcpy/issues/3679
+                this.#renderer.draw(frame);
             },
             error(e) {
                 console.warn(
@@ -152,8 +148,8 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
             croppedHeight,
         } = h264ParseConfiguration(data);
 
-        this.#renderer.width = croppedWidth;
-        this.#renderer.height = croppedHeight;
+        this.#canvas.width = croppedWidth;
+        this.#canvas.height = croppedHeight;
         this.#sizeChanged.fire({
             width: croppedWidth,
             height: croppedHeight,
@@ -182,8 +178,8 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
             croppedHeight,
         } = h265ParseConfiguration(data);
 
-        this.#renderer.width = croppedWidth;
-        this.#renderer.height = croppedHeight;
+        this.#canvas.width = croppedWidth;
+        this.#canvas.height = croppedHeight;
        this.#sizeChanged.fire({
             width: croppedWidth,
             height: croppedHeight,
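Putting the index.ts changes together, the decoder's public surface stays the same; below is a rough usage sketch, not part of this commit. The packet stream is a placeholder, and the event-subscription style and the ScrcpyMediaStreamPacket type are assumptions based on the existing decoder interface.

import type { ScrcpyMediaStreamPacket } from "@yume-chan/scrcpy";
import { ScrcpyVideoCodecId } from "@yume-chan/scrcpy";
import { WebCodecsDecoder } from "@yume-chan/scrcpy-decoder-webcodecs";

// Placeholder: a stream of scrcpy video packets obtained elsewhere from a device connection.
declare const videoPacketStream: ReadableStream<ScrcpyMediaStreamPacket>;

const decoder = new WebCodecsDecoder(ScrcpyVideoCodecId.H264);

// `renderer` is still a plain <canvas>; WebGL vs. bitmaprenderer is chosen internally.
document.body.appendChild(decoder.renderer);

decoder.sizeChanged(({ width, height }) => {
    console.log("video size changed:", width, height);
});

videoPacketStream.pipeTo(decoder.writable).catch((e) => console.error(e));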
3 changes: 3 additions & 0 deletions libraries/scrcpy-decoder-webcodecs/src/renderer.ts
@@ -0,0 +1,3 @@
export interface FrameRenderer {
    draw(frame: VideoFrame): void;
}
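Any object with a matching draw method satisfies this interface. For illustration (hypothetical, not part of this commit), a Canvas2D-based renderer would look like this:

import type { FrameRenderer } from "./renderer.js";

// Hypothetical third implementation, shown only to illustrate the contract.
export class Canvas2DFrameRenderer implements FrameRenderer {
    #context: CanvasRenderingContext2D;

    constructor(canvas: HTMLCanvasElement) {
        this.#context = canvas.getContext("2d")!;
    }

    draw(frame: VideoFrame): void {
        // A VideoFrame is a valid CanvasImageSource, so it can be drawn directly.
        this.#context.drawImage(frame, 0, 0);
        frame.close();
    }
}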
101 changes: 101 additions & 0 deletions libraries/scrcpy-decoder-webcodecs/src/webgl.ts
@@ -0,0 +1,101 @@
import type { FrameRenderer } from "./renderer.js";

export class WebGLFrameRenderer implements FrameRenderer {
    static vertexShaderSource = `
        attribute vec2 xy;
        varying highp vec2 uv;
        void main(void) {
            gl_Position = vec4(xy, 0.0, 1.0);
            // Map vertex coordinates (-1 to +1) to UV coordinates (0 to 1).
            // UV coordinates are Y-flipped relative to vertex coordinates.
            uv = vec2((1.0 + xy.x) / 2.0, (1.0 - xy.y) / 2.0);
        }
    `;

    static fragmentShaderSource = `
        varying highp vec2 uv;
        uniform sampler2D texture;
        void main(void) {
            gl_FragColor = texture2D(texture, uv);
        }
    `;

    #context: WebGLRenderingContext;

    constructor(canvas: HTMLCanvasElement) {
        const gl =
            canvas.getContext("webgl2", { alpha: false }) ||
            canvas.getContext("webgl", { alpha: false });
        if (!gl) {
            throw new Error("WebGL not supported");
        }
        this.#context = gl;

        const vertexShader = gl.createShader(gl.VERTEX_SHADER)!;
        gl.shaderSource(vertexShader, WebGLFrameRenderer.vertexShaderSource);
        gl.compileShader(vertexShader);
        if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
            throw new Error(gl.getShaderInfoLog(vertexShader)!);
        }

        const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)!;
        gl.shaderSource(
            fragmentShader,
            WebGLFrameRenderer.fragmentShaderSource,
        );
        gl.compileShader(fragmentShader);
        if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
            throw new Error(gl.getShaderInfoLog(fragmentShader)!);
        }

        const shaderProgram = gl.createProgram()!;
        gl.attachShader(shaderProgram, vertexShader);
        gl.attachShader(shaderProgram, fragmentShader);
        gl.linkProgram(shaderProgram);
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            throw new Error(gl.getProgramInfoLog(shaderProgram)!);
        }
        gl.useProgram(shaderProgram);

        // Vertex coordinates, clockwise from bottom-left.
        const vertexBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
        gl.bufferData(
            gl.ARRAY_BUFFER,
            new Float32Array([-1.0, -1.0, -1.0, +1.0, +1.0, +1.0, +1.0, -1.0]),
            gl.STATIC_DRAW,
        );

        const xyLocation = gl.getAttribLocation(shaderProgram, "xy");
        gl.vertexAttribPointer(xyLocation, 2, gl.FLOAT, false, 0, 0);
        gl.enableVertexAttribArray(xyLocation);

        // Create one texture to upload frames to.
        const texture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    }

    draw(frame: VideoFrame) {
        const gl = this.#context;
        gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.RGBA,
            gl.RGBA,
            gl.UNSIGNED_BYTE,
            frame,
        );
        frame.close();

        gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
        gl.drawArrays(gl.TRIANGLE_FAN, 0, 4);
    }
}
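One design note: draw() sets the viewport from gl.drawingBufferWidth and gl.drawingBufferHeight, so whoever owns the canvas must keep canvas.width and canvas.height in sync with the video size (WebCodecsDecoder does this when it parses a new H.264/H.265 configuration). A minimal sketch of that interaction, with placeholder values:

// Sketch only, not part of this commit.
const canvas = document.createElement("canvas");
const renderer = new WebGLFrameRenderer(canvas); // throws if WebGL is unavailable

function onConfigurationChanged(width: number, height: number) {
    // WebCodecsDecoder does the equivalent with the cropped size parsed from the configuration.
    canvas.width = width;
    canvas.height = height;
}

function onFrame(frame: VideoFrame) {
    // Uploads the frame via texImage2D, draws the quad, and closes the frame.
    renderer.draw(frame);
}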
