From ea735d5581c4b0500ff1cac6359e4a24361d34aa Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Sat, 4 Oct 2025 07:05:43 -0700 Subject: [PATCH 01/19] shader --- app/src/room/audio.ts | 97 +---------- app/src/room/broadcast.ts | 73 +------- app/src/room/canvas.ts | 218 +++++------------------- app/src/room/gl/background.ts | 70 ++++++++ app/src/room/gl/broadcast-renderer.ts | 172 +++++++++++++++++++ app/src/room/gl/camera.ts | 49 ++++++ app/src/room/gl/context.ts | 44 +++++ app/src/room/gl/shader.ts | 113 ++++++++++++ app/src/room/gl/shaders/background.frag | 80 +++++++++ app/src/room/gl/shaders/background.vert | 29 ++++ app/src/room/gl/shaders/broadcast.frag | 47 +++++ app/src/room/gl/shaders/broadcast.vert | 22 +++ app/src/room/space.ts | 121 ++++++------- app/src/room/video.ts | 141 ++++++++++++++- app/vite.config.ts | 2 + flake.nix | 3 + moq | 2 +- 17 files changed, 885 insertions(+), 398 deletions(-) create mode 100644 app/src/room/gl/background.ts create mode 100644 app/src/room/gl/broadcast-renderer.ts create mode 100644 app/src/room/gl/camera.ts create mode 100644 app/src/room/gl/context.ts create mode 100644 app/src/room/gl/shader.ts create mode 100644 app/src/room/gl/shaders/background.frag create mode 100644 app/src/room/gl/shaders/background.vert create mode 100644 app/src/room/gl/shaders/broadcast.frag create mode 100644 app/src/room/gl/shaders/broadcast.vert diff --git a/app/src/room/audio.ts b/app/src/room/audio.ts index 2daeab84..02e86ba7 100644 --- a/app/src/room/audio.ts +++ b/app/src/room/audio.ts @@ -134,95 +134,14 @@ export class Audio { } } - renderBackground(ctx: CanvasRenderingContext2D) { - ctx.save(); - - const bounds = this.broadcast.bounds.peek(); - - ctx.translate(bounds.position.x, bounds.position.y); - - const RADIUS = 12 * this.broadcast.zoom.peek(); - const PADDING = 12 * this.broadcast.zoom.peek(); - - // Background outline - ctx.beginPath(); - this.#roundedRectPath( - ctx, - -PADDING, - -PADDING, - bounds.size.x + PADDING * 2, - bounds.size.y + PADDING * 2, - RADIUS, - ); - ctx.fillStyle = "#000"; - ctx.fill(); - - ctx.restore(); - } - - render(ctx: CanvasRenderingContext2D) { - // Compute average volume - const analyserBuffer = this.sound.analyze(); - if (!analyserBuffer) return; // undefined in potato mode - - const bounds = this.broadcast.bounds.peek(); - const scale = this.broadcast.zoom.peek(); - - ctx.save(); - ctx.translate(bounds.position.x, bounds.position.y); - - const PADDING = 12 * scale; - const RADIUS = 12 * scale; - - // Take the absolute value of the distance from 128, which is silence. - for (let i = 0; i < this.#analyserBuffer.length; i++) { - analyserBuffer[i] = Math.abs(analyserBuffer[i] - 128); - } - - // If the audio is playing, combine the buffers. 
- if (this.#analyser) { - if (this.#analyserBuffer.length !== analyserBuffer.length) { - throw new Error("analyser buffer length mismatch"); - } - - this.#analyser.getByteTimeDomainData(this.#analyserBuffer); - for (let i = 0; i < this.#analyserBuffer.length; i++) { - analyserBuffer[i] += Math.abs(this.#analyserBuffer[i] - 128); - } - } - - let sum = 0; - for (let i = 0; i < this.#analyserBuffer.length; i++) { - const sample = analyserBuffer[i]; - sum += sample * sample; - } - const volume = Math.sqrt(sum) / this.#analyserBuffer.length; - this.#volumeSmoothed = this.#volumeSmoothed * 0.7 + volume * 0.3; - - // Colored fill based on volume and speaking state - const expand = PADDING * Math.min(1, this.#volumeSmoothed - 0.01); - - ctx.beginPath(); - this.#roundedRectPath(ctx, -expand, -expand, bounds.size.x + expand * 2, bounds.size.y + expand * 2, RADIUS); - - const hue = 180 + this.#volumeSmoothed * 120; - const alpha = 0.3 + this.#volumeSmoothed * 0.4; - - ctx.fillStyle = `hsla(${hue}, 80%, 45%, ${alpha})`; - ctx.fill(); - - // Ramp up/down the speaking alpha based on the speaking state. - this.#speakingAlpha = Math.max(Math.min(1, this.#speakingAlpha + (this.#speaking ? 0.1 : -0.1)), 0); - - // Add an additional border if we're speaking, ramping up/down the alpha - if (this.#speakingAlpha > 0) { - ctx.strokeStyle = `hsla(${hue}, 80%, 45%, ${this.#speakingAlpha})`; - ctx.lineWidth = 6 * scale; - ctx.stroke(); - } - - ctx.restore(); - } + // TODO: Audio visualization will be implemented with WebGL shaders + // renderBackground(ctx: CanvasRenderingContext2D) { + // // Black background outline + // } + + // render(ctx: CanvasRenderingContext2D) { + // // Audio visualization with colored fill based on volume + // } #roundedRectPath(ctx: CanvasRenderingContext2D, x: number, y: number, w: number, h: number, r: number) { const maxR = Math.min(r, w / 2, h / 2); diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts index 32dcffbc..f4220e9a 100644 --- a/app/src/room/broadcast.ts +++ b/app/src/room/broadcast.ts @@ -281,78 +281,15 @@ export class Broadcast { return false; } - // Render a locator arrow for our local broadcasts on join - renderLocator(now: DOMHighResTimeStamp, ctx: CanvasRenderingContext2D) { - if (!this.source.enabled.peek()) return; - - if (!this.visible.peek()) { - this.#locatorStart = undefined; - return; - } - - if (!this.#locatorStart) { - this.#locatorStart = now; - } - - const elapsed = now - this.#locatorStart; - const alpha = Math.min(Math.max((7000 - elapsed) / (10000 - 8000), 0), 1); - if (alpha <= 0) { - return; - } - - const bounds = this.bounds.peek(); - - ctx.save(); - ctx.globalAlpha *= alpha; - - // Calculate arrow position and animation - const arrowSize = 12 * this.zoom.peek(); - const pulseScale = 1 + Math.sin(now / 500) * 0.1; // Subtle pulsing effect - const offset = 10 * this.zoom.peek(); - - const gap = 2 * (arrowSize + offset); - - const x = Math.min(Math.max(bounds.position.x + bounds.size.x / 2, 0), ctx.canvas.width); - const y = Math.min(Math.max(bounds.position.y, 2 * gap), ctx.canvas.height); - - ctx.translate(x, y - gap); - ctx.scale(pulseScale, pulseScale); - - ctx.beginPath(); - ctx.moveTo(0, arrowSize); - ctx.lineTo(-arrowSize / 2, 0); - ctx.lineTo(arrowSize / 2, 0); - ctx.closePath(); - - // Style the arrow - ctx.lineWidth = 4 * this.zoom.peek(); - ctx.strokeStyle = "#000"; // Gold color - ctx.fillStyle = "#FFD700"; - ctx.stroke(); - ctx.fill(); - - // Draw "YOU" text - const fontSize = Math.round(32 * this.zoom.peek()); // round to 
avoid busting font caches - ctx.font = `bold ${fontSize}px Arial`; - ctx.textAlign = "center"; - ctx.textBaseline = "middle"; - ctx.fillStyle = "#FFD700"; - ctx.strokeText("YOU", 0, -arrowSize - offset); - ctx.fillText("YOU", 0, -arrowSize - offset); - - /* - // Add a subtle glow effect - ctx.shadowColor = "#FFD700"; - ctx.shadowBlur = 10 * fontScale; - ctx.stroke(); - */ - - ctx.restore(); - } + // TODO: Implement locator arrow with WebGL + // renderLocator(now: DOMHighResTimeStamp) { + // // Render "YOU" arrow above broadcast + // } close() { this.signals.close(); this.audio.close(); + this.video.close(); this.chat.close(); this.captions.close(); diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index 9a0c5fa3..bb27efc4 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -1,15 +1,8 @@ import { Effect, Signal } from "@kixelated/signals"; import { Vector } from "./geometry"; - -const LINE_SPACING = 32; -const LINE_WIDTH = 5; -const SEGMENTS = 16; -const WOBBLE_AMPLITUDE = 5; -const BEND_AMPLITUDE = 8; -const BEND_PROBABILITY = 0.2; -const WOBBLE_SPEED = 0.0006; -const LINE_OVERDRAW = 4; -const COLOR_SPEED = 0.01; +import { BackgroundRenderer } from "./gl/background"; +import { Camera } from "./gl/camera"; +import { GLContext } from "./gl/context"; export type CanvasProps = { demo?: boolean; @@ -17,10 +10,12 @@ export type CanvasProps = { export class Canvas { #canvas: HTMLCanvasElement; - #context: CanvasRenderingContext2D; + #glContext: GLContext; + #camera: Camera; + #backgroundRenderer: BackgroundRenderer; // Use a callback to render after the background. - onRender?: (ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) => void; + onRender?: (now: DOMHighResTimeStamp) => void; #animate?: number; visible: Signal; @@ -33,20 +28,26 @@ export class Canvas { return this.#canvas; } + get gl() { + return this.#glContext; + } + + get camera() { + return this.#camera; + } + constructor(element: HTMLCanvasElement, props?: CanvasProps) { this.#canvas = element; this.demo = new Signal(props?.demo ?? false); - - const context = this.#canvas.getContext("2d"); - if (!context) { - throw new Error("Failed to get canvas context"); - } - - this.#context = context; this.visible = new Signal(false); this.viewport = new Signal(Vector.create(0, 0)); + // Initialize WebGL2 context + this.#glContext = new GLContext(this.#canvas, this.viewport); + this.#camera = new Camera(); + this.#backgroundRenderer = new BackgroundRenderer(this.#glContext); + const resize = () => { // Check if we're in fullscreen or fixed position const isFullscreen = document.fullscreenElement === this.#canvas; @@ -82,13 +83,18 @@ export class Canvas { this.#canvas.width = newWidth; this.#canvas.height = newHeight; + // Update WebGL viewport + this.#glContext.resize(newWidth, newHeight); + // The internal logic ignores devicePixelRatio because we automatically scale when rendering. 
- this.viewport.set( - Vector.create( - this.#canvas.width / window.devicePixelRatio, - this.#canvas.height / window.devicePixelRatio, - ), + const viewport = Vector.create( + this.#canvas.width / window.devicePixelRatio, + this.#canvas.height / window.devicePixelRatio, ); + this.viewport.set(viewport); + + // Update camera projection + this.#camera.updateOrtho(viewport); }; let resizeTimeout: ReturnType | undefined; @@ -162,110 +168,33 @@ export class Canvas { } #render(now: DOMHighResTimeStamp) { - const ctx = this.#context; - ctx.imageSmoothingEnabled = true; - ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height); - - // Apply devicePixelRatio scaling once at the start - // This allows all drawing operations to use logical pixels (CSS pixels) - ctx.save(); - ctx.scale(window.devicePixelRatio, window.devicePixelRatio); + // Clear the screen + this.#glContext.clear(); - this.#renderBackground(this.#context, now); + // Render background with shader + this.#backgroundRenderer.render(now); - if (this.demo.peek()) { - this.#renderDemo(this.#context); - } + // TODO: Render demo text if enabled + // if (this.demo.peek()) { + // this.#renderDemo(now); + // } + // Render broadcasts if (this.onRender) { try { - this.onRender(this.#context, now); + this.onRender(now); } catch (err) { console.error("render error", err); } } - // Restore the context to remove the scaling - ctx.restore(); - this.#animate = requestAnimationFrame(this.#render.bind(this)); } - #renderDemo(ctx: CanvasRenderingContext2D) { - ctx.save(); - - // Use logical dimensions (CSS pixels) - const width = ctx.canvas.width / window.devicePixelRatio; - const height = ctx.canvas.height / window.devicePixelRatio; - - const fontSize = Math.round(64); // round to avoid busting font caches - ctx.font = `bold ${fontSize}px sans-serif`; - ctx.fillStyle = "rgba(255, 255, 255, 0.15)"; - ctx.textAlign = "center"; - ctx.textBaseline = "middle"; - - const positions = [ - { x: width * 0.3, y: height * 0.3, angle: -25 }, - { x: width * 0.7, y: height * 0.5, angle: 30 }, - { x: width * 0.5, y: height * 0.7, angle: -15 }, - { x: width * 0.2, y: height * 0.6, angle: 20 }, - { x: width * 0.8, y: height * 0.25, angle: -35 }, - ]; - - for (const pos of positions) { - ctx.save(); - ctx.translate(pos.x, pos.y); - ctx.rotate((pos.angle * Math.PI) / 180); - ctx.fillText("DEMO", 0, 0); - ctx.restore(); - } - - ctx.restore(); - } - - #renderBackground(ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) { - ctx.save(); - - // Use logical dimensions (CSS pixels) - const width = ctx.canvas.width / window.devicePixelRatio; - const height = ctx.canvas.height / window.devicePixelRatio; - - const LINE_COUNT = Math.ceil(height / LINE_SPACING) + LINE_OVERDRAW * 2; - - ctx.lineWidth = LINE_WIDTH; - ctx.lineCap = "round"; - ctx.globalAlpha = 0.25; - - for (let i = 0; i < LINE_COUNT; i++) { - ctx.strokeStyle = lineColor(now, i); - - const baseY = (i - LINE_OVERDRAW) * LINE_SPACING; - const wobble = Math.sin(now * WOBBLE_SPEED + i) * WOBBLE_AMPLITUDE; - - ctx.beginPath(); - - for (let s = 0; s <= SEGMENTS; s++) { - const t = s / SEGMENTS; - const xBase = -100 + t * (width + 200); - const xWobble = Math.sin(now * WOBBLE_SPEED + s + i) * WOBBLE_AMPLITUDE; - const x = xBase + xWobble; - - const seed = (s * 31 + i * 17) % 100; - const bend = seed / 100 < BEND_PROBABILITY ? (seed % 2 === 0 ? 
1 : -1) * BEND_AMPLITUDE : 0; - - const y = baseY + wobble + bend + t * 200; - if (s === 0) { - ctx.moveTo(x, y); - } else { - ctx.lineTo(x, y); - } - } - - ctx.stroke(); - } - - ctx.restore(); - } + // TODO: Implement demo text rendering with WebGL + // #renderDemo(now: DOMHighResTimeStamp) { + // // Render "DEMO" text at various positions + // } toggleFullscreen() { if (document.fullscreenElement) { @@ -294,63 +223,6 @@ export class Canvas { close() { this.#signals.close(); + this.#backgroundRenderer.cleanup(); } } - -function lineColor(now: DOMHighResTimeStamp, i: number) { - const hue = (i * 25 + now * COLOR_SPEED) % 360; - return `hsl(${hue}, 75%, 50%)`; -} - -// A node function to output the above as a -export function generateSvg() { - const now = 0; - const WIDTH = 1920; - const HEIGHT = 1080; - - const LINE_COUNT = Math.ceil(HEIGHT / LINE_SPACING) + LINE_OVERDRAW * 2; - - const paths = []; - for (let i = 0; i < LINE_COUNT; i++) { - const color = lineColor(now, i); - const baseY = (i - LINE_OVERDRAW) * LINE_SPACING; - const wobble = Math.sin(now * WOBBLE_SPEED + i) * WOBBLE_AMPLITUDE; - - const commands = []; - - for (let s = 0; s <= SEGMENTS; s++) { - const t = s / SEGMENTS; - const xBase = -100 + t * (WIDTH + 200); - const xWobble = Math.sin(now * WOBBLE_SPEED + s + i) * WOBBLE_AMPLITUDE; - const x = xBase + xWobble; - - const seed = (s * 31 + i * 17) % 100; - const bend = seed / 100 < BEND_PROBABILITY ? (seed % 2 === 0 ? 1 : -1) * BEND_AMPLITUDE : 0; - - const y = baseY + wobble + bend + t * 200; - const cmd = `${s === 0 ? "M" : "L"} ${x.toFixed(1)}, ${y.toFixed(1)}`; - commands.push(cmd); - } - - const d = commands.join(" "); - - paths.push(``); - } - - return ` - - - - ${paths.join("\n")} - - `; -} - -/* UNCOMMENT TO GENERATE SVG -import fs from "node:fs"; - -if (import.meta.url === `file://${process.argv[1]}`) { - fs.writeFileSync("public/image/background.svg", generateSvg()); - console.log("SVG written to public/image/background.svg"); -} -*/ diff --git a/app/src/room/gl/background.ts b/app/src/room/gl/background.ts new file mode 100644 index 00000000..578a83e9 --- /dev/null +++ b/app/src/room/gl/background.ts @@ -0,0 +1,70 @@ +import type { GLContext } from "./context"; +import { ShaderProgram } from "./shader"; +import backgroundVertSource from "./shaders/background.vert?raw"; +import backgroundFragSource from "./shaders/background.frag?raw"; + +export class BackgroundRenderer { + #glContext: GLContext; + #program: ShaderProgram; + #vao: WebGLVertexArrayObject; + #positionBuffer: WebGLBuffer; + + constructor(glContext: GLContext) { + this.#glContext = glContext; + const gl = glContext.gl; + + this.#program = new ShaderProgram(gl, backgroundVertSource, backgroundFragSource); + + const vao = gl.createVertexArray(); + if (!vao) throw new Error("Failed to create VAO"); + this.#vao = vao; + + const positionBuffer = gl.createBuffer(); + if (!positionBuffer) throw new Error("Failed to create position buffer"); + this.#positionBuffer = positionBuffer; + + this.#setupQuad(); + } + + #setupQuad() { + const gl = this.#glContext.gl; + + // Fullscreen quad vertices (clip space) + const positions = new Float32Array([ + -1.0, -1.0, + 1.0, -1.0, + -1.0, 1.0, + 1.0, 1.0, + ]); + + gl.bindVertexArray(this.#vao); + gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); + + const posLoc = this.#program.getAttribute("a_position"); + gl.enableVertexAttribArray(posLoc); + gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); + + 
gl.bindVertexArray(null); + } + + render(now: DOMHighResTimeStamp) { + const gl = this.#glContext.gl; + const viewport = this.#glContext.viewport.peek(); + + this.#program.use(); + this.#program.setUniform2f("u_resolution", viewport.x, viewport.y); + this.#program.setUniform1f("u_time", now); + + gl.bindVertexArray(this.#vao); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.bindVertexArray(null); + } + + cleanup() { + const gl = this.#glContext.gl; + gl.deleteVertexArray(this.#vao); + gl.deleteBuffer(this.#positionBuffer); + this.#program.cleanup(); + } +} diff --git a/app/src/room/gl/broadcast-renderer.ts b/app/src/room/gl/broadcast-renderer.ts new file mode 100644 index 00000000..243cab53 --- /dev/null +++ b/app/src/room/gl/broadcast-renderer.ts @@ -0,0 +1,172 @@ +import type { Broadcast } from "../broadcast"; +import type { Camera } from "./camera"; +import type { GLContext } from "./context"; +import { ShaderProgram } from "./shader"; +import broadcastVertSource from "./shaders/broadcast.vert?raw"; +import broadcastFragSource from "./shaders/broadcast.frag?raw"; + +export class BroadcastRenderer { + #glContext: GLContext; + #program: ShaderProgram; + #vao: WebGLVertexArrayObject; + #positionBuffer: WebGLBuffer; + #texCoordBuffer: WebGLBuffer; + #indexBuffer: WebGLBuffer; + + constructor(glContext: GLContext) { + this.#glContext = glContext; + const gl = glContext.gl; + + this.#program = new ShaderProgram(gl, broadcastVertSource, broadcastFragSource); + + const vao = gl.createVertexArray(); + if (!vao) throw new Error("Failed to create VAO"); + this.#vao = vao; + + const positionBuffer = gl.createBuffer(); + if (!positionBuffer) throw new Error("Failed to create position buffer"); + this.#positionBuffer = positionBuffer; + + const texCoordBuffer = gl.createBuffer(); + if (!texCoordBuffer) throw new Error("Failed to create texCoord buffer"); + this.#texCoordBuffer = texCoordBuffer; + + const indexBuffer = gl.createBuffer(); + if (!indexBuffer) throw new Error("Failed to create index buffer"); + this.#indexBuffer = indexBuffer; + + this.#setupBuffers(); + } + + #setupBuffers() { + const gl = this.#glContext.gl; + + // Quad vertices (0-1 range, will be scaled by bounds) + const positions = new Float32Array([ + 0, + 0, // Top-left + 1, + 0, // Top-right + 1, + 1, // Bottom-right + 0, + 1, // Bottom-left + ]); + + // Texture coordinates + const texCoords = new Float32Array([ + 0, + 0, // Top-left + 1, + 0, // Top-right + 1, + 1, // Bottom-right + 0, + 1, // Bottom-left + ]); + + // Indices for two triangles + const indices = new Uint16Array([0, 1, 2, 0, 2, 3]); + + gl.bindVertexArray(this.#vao); + + // Position attribute + gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); + const posLoc = this.#program.getAttribute("a_position"); + gl.enableVertexAttribArray(posLoc); + gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); + + // TexCoord attribute + gl.bindBuffer(gl.ARRAY_BUFFER, this.#texCoordBuffer); + gl.bufferData(gl.ARRAY_BUFFER, texCoords, gl.STATIC_DRAW); + const texCoordLoc = this.#program.getAttribute("a_texCoord"); + gl.enableVertexAttribArray(texCoordLoc); + gl.vertexAttribPointer(texCoordLoc, 2, gl.FLOAT, false, 0, 0); + + // Index buffer + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer); + gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW); + + gl.bindVertexArray(null); + } + + render( + broadcast: Broadcast, + camera: Camera, + maxZ: number, + modifiers?: { + dragging?: boolean; + 
hovering?: boolean; + }, + ) { + const gl = this.#glContext.gl; + const bounds = broadcast.bounds.peek(); + const scale = broadcast.zoom.peek(); + + this.#program.use(); + + // Set projection matrix + this.#program.setUniformMatrix4fv("u_projection", camera.projection); + + // Set bounds (x, y, width, height) + this.#program.setUniform4f("u_bounds", bounds.position.x, bounds.position.y, bounds.size.x, bounds.size.y); + + // Set depth based on z-index + const depth = camera.zToDepth(broadcast.position.peek().z, maxZ); + this.#program.setUniform1f("u_depth", depth); + + // Set radius for rounded corners + const radius = 12 * scale; + this.#program.setUniform1f("u_radius", radius); + + // Set size for SDF calculation + this.#program.setUniform2f("u_size", bounds.size.x, bounds.size.y); + + // Set opacity + let opacity = broadcast.video.online; + if (modifiers?.dragging) { + opacity *= 0.7; + } + this.#program.setUniform1f("u_opacity", opacity); + + // Set avatar transition (0 = avatar, 1 = video) + this.#program.setUniform1f("u_avatarTransition", broadcast.video.avatarTransition); + + // Bind video texture if available + const texture = broadcast.video.texture; + if (texture) { + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, texture); + this.#program.setUniform1i("u_texture", 0); + this.#program.setUniform1i("u_hasTexture", 1); + } else { + this.#program.setUniform1i("u_hasTexture", 0); + } + + // Bind avatar texture if available + const avatarTexture = broadcast.video.avatarTexture; + if (avatarTexture) { + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, avatarTexture); + this.#program.setUniform1i("u_avatarTexture", 1); + this.#program.setUniform1i("u_hasAvatar", 1); + } else { + this.#program.setUniform1i("u_hasAvatar", 0); + } + + // Draw + gl.bindVertexArray(this.#vao); + gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0); + gl.bindVertexArray(null); + } + + cleanup() { + const gl = this.#glContext.gl; + gl.deleteVertexArray(this.#vao); + gl.deleteBuffer(this.#positionBuffer); + gl.deleteBuffer(this.#texCoordBuffer); + gl.deleteBuffer(this.#indexBuffer); + this.#program.cleanup(); + } +} diff --git a/app/src/room/gl/camera.ts b/app/src/room/gl/camera.ts new file mode 100644 index 00000000..aaadbca7 --- /dev/null +++ b/app/src/room/gl/camera.ts @@ -0,0 +1,49 @@ +import { Vector } from "../geometry"; + +export class Camera { + projection: Float32Array; + + constructor() { + this.projection = new Float32Array(16); + } + + // Create a 2D orthographic projection matrix + updateOrtho(viewport: Vector) { + const left = 0; + const right = viewport.x; + const bottom = viewport.y; + const top = 0; + const near = -100; // Allow some depth for z-index + const far = 100; + + // Column-major order for WebGL + this.projection[0] = 2 / (right - left); + this.projection[1] = 0; + this.projection[2] = 0; + this.projection[3] = 0; + + this.projection[4] = 0; + this.projection[5] = 2 / (top - bottom); + this.projection[6] = 0; + this.projection[7] = 0; + + this.projection[8] = 0; + this.projection[9] = 0; + this.projection[10] = 2 / (near - far); + this.projection[11] = 0; + + this.projection[12] = (left + right) / (left - right); + this.projection[13] = (bottom + top) / (bottom - top); + this.projection[14] = (near + far) / (near - far); + this.projection[15] = 1; + } + + // Convert z-index to depth value + // Higher z-index = closer to camera (lower depth value) + zToDepth(z: number, maxZ: number): number { + // Normalize z-index to 0-1 range, then map to depth range + 
// We use a small range to keep everything mostly 2D + const normalized = maxZ > 0 ? z / maxZ : 0; + return -normalized * 0.01; // Small depth range for subtle effect + } +} diff --git a/app/src/room/gl/context.ts b/app/src/room/gl/context.ts new file mode 100644 index 00000000..ff5e3ee5 --- /dev/null +++ b/app/src/room/gl/context.ts @@ -0,0 +1,44 @@ +import { Signal } from "@kixelated/signals"; +import { Vector } from "../geometry"; + +export class GLContext { + gl: WebGL2RenderingContext; + canvas: HTMLCanvasElement; + viewport: Signal; + + constructor(canvas: HTMLCanvasElement, viewport: Signal) { + const gl = canvas.getContext("webgl2", { + alpha: false, + antialias: true, + depth: true, + premultipliedAlpha: false, + }); + + if (!gl) { + throw new Error("WebGL2 not supported"); + } + + this.gl = gl; + this.canvas = canvas; + this.viewport = viewport; + + // Enable depth testing for z-index ordering + gl.enable(gl.DEPTH_TEST); + gl.depthFunc(gl.LEQUAL); + + // Enable blending for transparency + gl.enable(gl.BLEND); + gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA); + } + + clear() { + const gl = this.gl; + gl.clearColor(0, 0, 0, 1); + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + } + + resize(width: number, height: number) { + const gl = this.gl; + gl.viewport(0, 0, width, height); + } +} diff --git a/app/src/room/gl/shader.ts b/app/src/room/gl/shader.ts new file mode 100644 index 00000000..f9624515 --- /dev/null +++ b/app/src/room/gl/shader.ts @@ -0,0 +1,113 @@ +export class ShaderProgram { + gl: WebGL2RenderingContext; + program: WebGLProgram; + uniforms: Map; + attributes: Map; + + constructor(gl: WebGL2RenderingContext, vertexSource: string, fragmentSource: string) { + this.gl = gl; + this.uniforms = new Map(); + this.attributes = new Map(); + + const vertexShader = this.#compileShader(gl.VERTEX_SHADER, vertexSource); + const fragmentShader = this.#compileShader(gl.FRAGMENT_SHADER, fragmentSource); + + const program = gl.createProgram(); + if (!program) { + throw new Error("Failed to create shader program"); + } + + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + const info = gl.getProgramInfoLog(program); + gl.deleteProgram(program); + throw new Error(`Shader program link failed: ${info}`); + } + + this.program = program; + + // Clean up shaders after linking + gl.deleteShader(vertexShader); + gl.deleteShader(fragmentShader); + } + + #compileShader(type: number, source: string): WebGLShader { + const gl = this.gl; + const shader = gl.createShader(type); + if (!shader) { + throw new Error("Failed to create shader"); + } + + gl.shaderSource(shader, source); + gl.compileShader(shader); + + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + const info = gl.getShaderInfoLog(shader); + gl.deleteShader(shader); + throw new Error(`Shader compilation failed: ${info}`); + } + + return shader; + } + + use() { + this.gl.useProgram(this.program); + } + + getUniform(name: string): WebGLUniformLocation { + let location = this.uniforms.get(name); + if (location === undefined) { + const loc = this.gl.getUniformLocation(this.program, name); + if (!loc) { + throw new Error(`Uniform ${name} not found`); + } + this.uniforms.set(name, loc); + location = loc; + } + return location; + } + + getAttribute(name: string): number { + let location = this.attributes.get(name); + if (location === undefined) { + const loc = this.gl.getAttribLocation(this.program, name); 
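+			// NOTE: getAttribLocation returns -1 for a missing or optimized-out attribute (unlike getUniformLocation, which returns null).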
+ if (loc === -1) { + throw new Error(`Attribute ${name} not found`); + } + this.attributes.set(name, loc); + location = loc; + } + return location; + } + + setUniform1f(name: string, value: number) { + this.gl.uniform1f(this.getUniform(name), value); + } + + setUniform2f(name: string, x: number, y: number) { + this.gl.uniform2f(this.getUniform(name), x, y); + } + + setUniform3f(name: string, x: number, y: number, z: number) { + this.gl.uniform3f(this.getUniform(name), x, y, z); + } + + setUniform4f(name: string, x: number, y: number, z: number, w: number) { + this.gl.uniform4f(this.getUniform(name), x, y, z, w); + } + + setUniform1i(name: string, value: number) { + this.gl.uniform1i(this.getUniform(name), value); + } + + setUniformMatrix4fv(name: string, value: Float32Array) { + this.gl.uniformMatrix4fv(this.getUniform(name), false, value); + } + + cleanup() { + this.gl.deleteProgram(this.program); + } +} diff --git a/app/src/room/gl/shaders/background.frag b/app/src/room/gl/shaders/background.frag new file mode 100644 index 00000000..81e660e9 --- /dev/null +++ b/app/src/room/gl/shaders/background.frag @@ -0,0 +1,80 @@ +#version 300 es +precision highp float; + +in vec2 v_pixel; +out vec4 fragColor; + +uniform vec2 u_resolution; +uniform float u_time; + +const float LINE_SPACING = 28.0; +const float LINE_WIDTH = 4.0; +const float WOBBLE_AMPLITUDE = 5.0; +const float WOBBLE_SPEED = 0.0004; + +const float SEGMENT_WIDTH = 120.0; // Pixels per segment + +// Hash function for deterministic randomness +float hash(float n) { + return fract(sin(n) * 43758.5453123); +} + +// Convert HSL to RGB +vec3 hsl2rgb(float h, float s, float l) { + float c = (1.0 - abs(2.0 * l - 1.0)) * s; + float x = c * (1.0 - abs(mod(h / 60.0, 2.0) - 1.0)); + float m = l - c / 2.0; + + vec3 rgb; + if (h < 60.0) rgb = vec3(c, x, 0.0); + else if (h < 120.0) rgb = vec3(x, c, 0.0); + else if (h < 180.0) rgb = vec3(0.0, c, x); + else if (h < 240.0) rgb = vec3(0.0, x, c); + else if (h < 300.0) rgb = vec3(x, 0.0, c); + else rgb = vec3(c, 0.0, x); + + return rgb + m; +} + +void main() { + // Work in simple horizontal line space - rotation happens in vertex shader + vec2 pos = v_pixel; + + // Determine which horizontal segment we're in + float segmentIndex = floor(pos.x / SEGMENT_WIDTH); + + // Determine which line this pixel belongs to + float lineIndex = floor(pos.y / LINE_SPACING); + + // Get wobble offsets at segment boundaries + float seedStart = segmentIndex * 31.0 + lineIndex * 17.0; + float seedEnd = (segmentIndex + 1.0) * 31.0 + lineIndex * 17.0; + + float wobbleStart = hash(seedStart) * WOBBLE_AMPLITUDE * 2.0 - WOBBLE_AMPLITUDE; + float wobbleEnd = hash(seedEnd) * WOBBLE_AMPLITUDE * 2.0 - WOBBLE_AMPLITUDE; + + // Add animated wobble + wobbleStart += sin(u_time * WOBBLE_SPEED + lineIndex + segmentIndex) * WOBBLE_AMPLITUDE; + wobbleEnd += sin(u_time * WOBBLE_SPEED + lineIndex + segmentIndex + 1.0) * WOBBLE_AMPLITUDE; + + // Interpolate wobble within segment + float segmentT = mod(pos.x, SEGMENT_WIDTH) / SEGMENT_WIDTH; + float wobble = mix(wobbleStart, wobbleEnd, segmentT); + + // Calculate the line position (centered in the bucket) + float baseLineY = lineIndex * LINE_SPACING + LINE_SPACING * 0.5; + float lineY = baseLineY + wobble; + + // Distance from this pixel to the line + float dist = abs(pos.y - lineY); + + // Calculate line color + float hue = mod(lineIndex * 25.0 + u_time * 0.01, 360.0); + vec3 lineColor = hsl2rgb(hue, 0.75, 0.5); + + // Anti-aliased lines with feathering + float lineAlpha = 1.0 - 
smoothstep(LINE_WIDTH * 0.25, LINE_WIDTH * 1.25, dist); + lineAlpha *= 0.5; + + fragColor = vec4(lineColor * lineAlpha, lineAlpha); +} diff --git a/app/src/room/gl/shaders/background.vert b/app/src/room/gl/shaders/background.vert new file mode 100644 index 00000000..5275585a --- /dev/null +++ b/app/src/room/gl/shaders/background.vert @@ -0,0 +1,29 @@ +#version 300 es + +in vec2 a_position; + +out vec2 v_pixel; + +uniform vec2 u_resolution; + +void main() { + // Rotate the entire quad in clip space + float angle = -0.25; + float cosA = cos(angle); + float sinA = sin(angle); + vec2 rotatedPos = vec2( + a_position.x * cosA - a_position.y * sinA, + a_position.x * sinA + a_position.y * cosA + ); + + // Scale rotated quad to ensure it covers the viewport + // sqrt(2) ~= 1.42 ensures rotated square covers original square + rotatedPos *= 1.5; + + // Convert to pixel coordinates for fragment shader (unrotated logical space) + vec2 uv = a_position * 0.5 + 0.5; + v_pixel = uv * u_resolution; + + // Place background far back in depth (z = 1.0 in clip space) + gl_Position = vec4(rotatedPos, 1.0, 1.0); +} diff --git a/app/src/room/gl/shaders/broadcast.frag b/app/src/room/gl/shaders/broadcast.frag new file mode 100644 index 00000000..e7e9e80b --- /dev/null +++ b/app/src/room/gl/shaders/broadcast.frag @@ -0,0 +1,47 @@ +#version 300 es +precision highp float; + +in vec2 v_texCoord; +in vec2 v_pos; + +uniform sampler2D u_texture; +uniform sampler2D u_avatarTexture; +uniform bool u_hasTexture; +uniform bool u_hasAvatar; +uniform float u_radius; +uniform vec2 u_size; +uniform float u_opacity; +uniform float u_avatarTransition; // 0 = avatar, 1 = video + +out vec4 fragColor; + +// Signed distance function for rounded rectangle +float roundedBoxSDF(vec2 center, vec2 size, float radius) { + vec2 q = abs(center) - size + radius; + return min(max(q.x, q.y), 0.0) + length(max(q, 0.0)) - radius; +} + +void main() { + // Calculate position from center + vec2 center = (v_pos - 0.5) * u_size; + + // Calculate SDF for rounded corners + float dist = roundedBoxSDF(center, u_size * 0.5, u_radius); + + // Discard pixels outside the rounded rectangle + if (dist > 0.0) { + discard; + } + + // Smooth edge antialiasing + float alpha = 1.0 - smoothstep(-1.0, 0.0, dist); + + // Sample textures + vec4 videoColor = u_hasTexture ? texture(u_texture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 avatarColor = u_hasAvatar ? 
texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + + // Blend between avatar and video based on transition + vec4 texColor = mix(avatarColor, videoColor, u_avatarTransition); + + fragColor = vec4(texColor.rgb, texColor.a * alpha * u_opacity); +} diff --git a/app/src/room/gl/shaders/broadcast.vert b/app/src/room/gl/shaders/broadcast.vert new file mode 100644 index 00000000..dcd10e9d --- /dev/null +++ b/app/src/room/gl/shaders/broadcast.vert @@ -0,0 +1,22 @@ +#version 300 es + +in vec2 a_position; +in vec2 a_texCoord; + +uniform mat4 u_projection; +uniform vec4 u_bounds; // x, y, width, height +uniform float u_depth; + +out vec2 v_texCoord; +out vec2 v_pos; // Position within the quad (0-1) + +void main() { + // Scale and translate to bounds + vec2 pos = a_position * u_bounds.zw + u_bounds.xy; + + // Apply projection + gl_Position = u_projection * vec4(pos, u_depth, 1.0); + + v_texCoord = a_texCoord; + v_pos = a_position; +} diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 33489ecd..3748f88f 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -3,6 +3,7 @@ import { Effect, Signal } from "@kixelated/signals"; import { Broadcast, BroadcastSource } from "./broadcast"; import type { Canvas } from "./canvas"; import { Vector } from "./geometry"; +import { BroadcastRenderer } from "./gl/broadcast-renderer"; import type { Sound } from "./sound"; export type SpaceProps = { @@ -30,6 +31,9 @@ export class Space { #maxZ = 0; + // WebGL renderer + #broadcastRenderer: BroadcastRenderer; + // Touch handling for mobile #touches = new Map(); #pinchStartDistance = 0; @@ -42,6 +46,9 @@ export class Space { this.sound = sound; this.profile = props?.profile ?? false; + // Initialize WebGL renderer + this.#broadcastRenderer = new BroadcastRenderer(canvas.gl); + // Use the new eventListener helper that automatically handles cleanup this.#signals.event(canvas.element, "mousedown", this.#onMouseDown.bind(this)); this.#signals.event(canvas.element, "mousemove", this.#onMouseMove.bind(this)); @@ -56,11 +63,17 @@ export class Space { this.#signals.effect(this.#runScale.bind(this)); - // This is a bit of a hack, but register our render method. - this.canvas.onRender = this.#tick.bind(this); + // Register tick and render methods separately + this.canvas.onRender = this.#render.bind(this); this.#signals.cleanup(() => { this.canvas.onRender = undefined; }); + + // Run tick separately from render at 60fps + this.#signals.effect((effect) => { + const interval = setInterval(() => this.#tickAll(), 1000 / 60); + effect.cleanup(() => clearInterval(interval)); + }); } #onMouseDown(e: MouseEvent) { @@ -385,6 +398,9 @@ export class Space { add(id: string, source: BroadcastSource): Broadcast { const broadcast = new Broadcast({ source, canvas: this.canvas, sound: this.sound, scale: this.#scale }); + // Set GL context for video texture uploads + broadcast.video.setGLContext(this.canvas.gl.gl); + // Put new broadcasts on top of the stack. // NOTE: This is not sent over the network. 
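 		// The z value only affects local draw order (it is fed into zToDepth at render time).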
broadcast.position.update((prev) => ({ @@ -495,7 +511,8 @@ export class Space { return all; } - #tick(ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) { + // Tick physics separately from rendering + #tickAll() { for (const broadcast of this.#rip) { broadcast.tick(); } @@ -534,89 +551,60 @@ export class Space { b.velocity = b.velocity.sub(force); } } - - this.#render(ctx, now); } - #render(ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) { - // Render the audio click prompt if audio is suspended - if (this.sound.suspended.peek() && !this.profile) { - this.#renderAudioPrompt(ctx); - } + // Render using WebGL + #render(now: DOMHighResTimeStamp) { + // TODO: Render the audio click prompt if audio is suspended + // if (this.sound.suspended.peek() && !this.profile) { + // this.#renderAudioPrompt(); + // } const broadcasts = this.ordered.peek(); - for (const broadcast of broadcasts) { - broadcast.audio.renderBackground(ctx); - } - for (const broadcast of broadcasts) { - broadcast.audio.render(ctx); - } + // TODO: Render audio visualization backgrounds + // for (const broadcast of broadcasts) { + // // Audio background rendering + // } + + // TODO: Render audio visualization + // for (const broadcast of broadcasts) { + // // Audio visualization rendering + // } - // Broadcasts fading out don't have collision so they're in a separate structure. + // Render broadcasts fading out for (const broadcast of this.#rip) { - broadcast.video.render(now, ctx); + this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ); } + // Render all broadcasts (except dragging) for (const broadcast of broadcasts) { if (this.#dragging !== broadcast) { - ctx.save(); - broadcast.video.render(now, ctx, { + this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, { hovering: this.#hovering === broadcast || this.profile, }); - ctx.restore(); } } - // Render the dragging broadcast last so it's always on top. + // Render the dragging broadcast last so it's always on top if (this.#dragging) { - ctx.save(); - ctx.fillStyle = "rgba(0, 0, 0, 0.5)"; - this.#dragging.video.render(now, ctx, { dragging: true }); - ctx.restore(); + this.#broadcastRenderer.render(this.#dragging, this.canvas.camera, this.#maxZ, { + dragging: true, + }); } - // Render the locator arrows for our broadcasts on join. 
- for (const broadcast of broadcasts) { - if (broadcast.source instanceof Publish.Broadcast) { - broadcast.renderLocator(now, ctx); - } - } + // TODO: Render the locator arrows for our broadcasts on join + // for (const broadcast of broadcasts) { + // if (broadcast.source instanceof Publish.Broadcast) { + // broadcast.renderLocator(now); + // } + // } } - #renderAudioPrompt(ctx: CanvasRenderingContext2D) { - ctx.save(); - - // Use logical dimensions (CSS pixels) - const width = ctx.canvas.width / window.devicePixelRatio; - const padding = 30; - const boxWidth = 400; - const height = 80; - const y = ctx.canvas.height / window.devicePixelRatio - height - padding; - const x = (width - boxWidth) / 2; - const borderRadius = 16; - - // Rounded rectangle with thick black border - ctx.fillStyle = "rgba(0, 0, 0, 0.9)"; - ctx.beginPath(); - ctx.roundRect(x, y, boxWidth, height, borderRadius); - ctx.fill(); - - // Thick border - ctx.strokeStyle = "rgba(0, 0, 0, 1)"; - ctx.lineWidth = 6; - ctx.stroke(); - - // Text - const fontSize = Math.round(24); // round to avoid busting font caches - ctx.font = `${fontSize}px sans-serif`; - ctx.fillStyle = "rgba(255, 255, 255, 0.75)"; - ctx.textAlign = "center"; - ctx.textBaseline = "middle"; - ctx.fillText("🔊 Click to enable audio", width * 0.5, y + height / 2); - - ctx.restore(); - } + // TODO: Implement audio prompt with WebGL or DOM overlay + // #renderAudioPrompt() { + // // "🔊 Click to enable audio" + // } #runScale(effect: Effect) { const broadcasts = effect.get(this.ordered); @@ -654,6 +642,9 @@ export class Space { this.#rip = []; this.ordered.set([]); this.lookup.clear(); + + // Cleanup WebGL resources + this.#broadcastRenderer.cleanup(); } // Publish the current position to the network. diff --git a/app/src/room/video.ts b/app/src/room/video.ts index f4ea7271..b9cae4ea 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -34,6 +34,11 @@ export class Video { #memeOpacity = 0; #nameOpacity = 0; + // WebGL textures for this broadcast + texture?: WebGLTexture; // Video texture + avatarTexture?: WebGLTexture; // Avatar texture + #gl?: WebGL2RenderingContext; + constructor(broadcast: Broadcast) { this.broadcast = broadcast; this.broadcast.signals.effect(this.#runAvatar.bind(this)); @@ -41,6 +46,22 @@ export class Video { this.broadcast.signals.effect(this.#runFrame.bind(this)); } + setGLContext(gl: WebGL2RenderingContext) { + this.#gl = gl; + + // Create the textures + this.texture = gl.createTexture(); + if (!this.texture) throw new Error("Failed to create video texture"); + + this.avatarTexture = gl.createTexture(); + if (!this.avatarTexture) throw new Error("Failed to create avatar texture"); + + // Set up texture upload effects + this.broadcast.signals.effect(this.#uploadVideoTexture.bind(this)); + this.broadcast.signals.effect(this.#uploadAvatarTexture.bind(this)); + // TODO: Add meme texture upload effect + } + #runAvatar(effect: Effect) { let avatar = effect.get(this.broadcast.source.user.avatar); if (!avatar) { @@ -53,11 +74,28 @@ export class Video { // TODO only set the avatar if it successfully loads const newAvatar = new Image(); + + // For SVGs, load at higher resolution to avoid pixelation + // Set a reasonable size (e.g., 512x512) for better quality + if (avatar.endsWith('.svg')) { + newAvatar.width = 512; + newAvatar.height = 512; + } + newAvatar.src = avatar; const load = () => { this.avatar = newAvatar; - this.avatarSize.set(Vector.create(newAvatar.width, newAvatar.height)); + 
this.avatarSize.set(Vector.create(newAvatar.naturalWidth || newAvatar.width, newAvatar.naturalHeight || newAvatar.height));
+
+			// Upload avatar texture after it loads
+			if (this.#gl && this.avatarTexture) {
+				try {
+					this.#uploadImageToAvatarTexture(this.avatar);
+				} catch (err) {
+					console.error("Failed to upload avatar texture on load:", err);
+				}
+			}
 		};
 
 		effect.event(newAvatar, "load", load);
@@ -99,6 +137,89 @@
 		}
 	}
 
+	// Effect: Upload video frame to texture when it changes
+	#uploadVideoTexture(effect: Effect) {
+		if (!this.#gl || !this.texture) return;
+
+		// Listen to the video frame signal (both instanceof branches are identical at runtime; the check only narrows the union type).
+		let frame: VideoFrame | HTMLVideoElement | undefined;
+
+		if (this.broadcast.source instanceof FakeBroadcast) {
+			frame = effect.get(this.broadcast.source.video.frame);
+		} else {
+			frame = effect.get(this.broadcast.source.video.frame);
+		}
+
+		if (!frame) {
+			// No video frame available
+			return;
+		}
+
+		try {
+			if (frame instanceof VideoFrame) {
+				this.#uploadVideoFrameToTexture(frame);
+			} else if (frame instanceof HTMLVideoElement) {
+				this.#uploadVideoElementToTexture(frame);
+			}
+		} catch (err) {
+			console.error("Failed to upload video texture:", err);
+		}
+	}
+
+	// Effect: Upload avatar to texture when it loads
+	#uploadAvatarTexture(effect: Effect) {
+		if (!this.#gl || !this.avatarTexture) return;
+
+		// Listen to the avatar signal
+		const avatarUrl = effect.get(this.broadcast.source.user.avatar);
+		if (!avatarUrl) return;
+
+		// Wait for the image to load
+		if (!this.avatar.complete || this.avatar.src !== avatarUrl) {
+			return;
+		}
+
+		try {
+			this.#uploadImageToAvatarTexture(this.avatar);
+		} catch (err) {
+			console.error("Failed to upload avatar texture:", err);
+		}
+	}
+
+	#uploadVideoFrameToTexture(frame: VideoFrame) {
+		const gl = this.#gl!;
+		gl.bindTexture(gl.TEXTURE_2D, this.texture!);
+		gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+		gl.bindTexture(gl.TEXTURE_2D, null);
+	}
+
+	#uploadVideoElementToTexture(video: HTMLVideoElement) {
+		const gl = this.#gl!;
+		gl.bindTexture(gl.TEXTURE_2D, this.texture!);
+		gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+		gl.bindTexture(gl.TEXTURE_2D, null);
+	}
+
+	#uploadImageToAvatarTexture(image: HTMLImageElement) {
+		const gl = this.#gl!;
+		gl.bindTexture(gl.TEXTURE_2D, this.avatarTexture!);
+		gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
+		gl.generateMipmap(gl.TEXTURE_2D);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
+		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+		gl.bindTexture(gl.TEXTURE_2D, null);
+	}
+
 	tick() {
 		if (this.frame) {
 			this.avatarTransition = Math.min(this.avatarTransition + 0.05, 1);
@@ -118,7 +239,22 @@
 	 */
 	}
 
-	// Try to 
avoid any mutations in this function; do it in tick instead. + close() { + if (this.#gl) { + if (this.texture) { + this.#gl.deleteTexture(this.texture); + this.texture = undefined; + } + if (this.avatarTexture) { + this.#gl.deleteTexture(this.avatarTexture); + this.avatarTexture = undefined; + } + } + } + + // TODO: Rendering is now handled by WebGL in space.ts + // This method is kept for reference but will be removed + /* render( _now: DOMHighResTimeStamp, ctx: CanvasRenderingContext2D, @@ -353,4 +489,5 @@ export class Video { ctx.restore(); } + */ } diff --git a/app/vite.config.ts b/app/vite.config.ts index e4c46e38..b0c73b8f 100644 --- a/app/vite.config.ts +++ b/app/vite.config.ts @@ -28,6 +28,8 @@ export default defineConfig(({ mode }) => { format: "es" as const, }, + assetsInclude: ["**/*.glsl", "**/*.vert", "**/*.frag"], + plugins: [ solid(), tailwindcss(), diff --git a/flake.nix b/flake.nix index 85f875b8..0b70782d 100644 --- a/flake.nix +++ b/flake.nix @@ -33,6 +33,9 @@ # Icon generation tools imagemagick libicns # provides png2icns + + # Shader validation + glslang ]; }; diff --git a/moq b/moq index 0a6101da..c4ee0b6b 160000 --- a/moq +++ b/moq @@ -1 +1 @@ -Subproject commit 0a6101daae9e363a99f9507c683502aa684d811c +Subproject commit c4ee0b6bdb83c4aea3f028b35f03ff51f5ca719f From e0b82e43ac17f0c3416df075c92ee190e1402ce5 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 11:27:45 -0700 Subject: [PATCH 02/19] Initial webGL --- app/package.json | 3 +- app/src/room/canvas.ts | 4 +- app/src/room/gl/background.ts | 34 ++-- .../{broadcast-renderer.ts => broadcast.ts} | 104 +++++++---- app/src/room/gl/shader.ts | 163 +++++++++++++----- app/src/room/space.ts | 7 +- app/src/room/video.ts | 48 ++---- 7 files changed, 235 insertions(+), 128 deletions(-) rename app/src/room/gl/{broadcast-renderer.ts => broadcast.ts} (54%) diff --git a/app/package.json b/app/package.json index 5f15accd..12a1605f 100644 --- a/app/package.json +++ b/app/package.json @@ -7,7 +7,8 @@ "scripts": { "dev": "bunx --bun vite --open", "build": "bunx --bun vite build", - "check": "tsc --noEmit && biome check", + "check": "tsc --noEmit && biome check && bun run check:shaders", + "check:shaders": "glslangValidator src/room/gl/shaders/*.vert src/room/gl/shaders/*.frag", "fix": "biome check --fix", "tauri": "tauri" }, diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index bb27efc4..9ab0e1c0 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -28,8 +28,8 @@ export class Canvas { return this.#canvas; } - get gl() { - return this.#glContext; + get gl(): WebGL2RenderingContext { + return this.#glContext.gl; } get camera() { diff --git a/app/src/room/gl/background.ts b/app/src/room/gl/background.ts index 578a83e9..f4c6ada1 100644 --- a/app/src/room/gl/background.ts +++ b/app/src/room/gl/background.ts @@ -1,19 +1,29 @@ import type { GLContext } from "./context"; -import { ShaderProgram } from "./shader"; -import backgroundVertSource from "./shaders/background.vert?raw"; +import { Attribute, Shader, Uniform1f, Uniform2f } from "./shader"; import backgroundFragSource from "./shaders/background.frag?raw"; +import backgroundVertSource from "./shaders/background.vert?raw"; export class BackgroundRenderer { #glContext: GLContext; - #program: ShaderProgram; + #program: Shader; #vao: WebGLVertexArrayObject; #positionBuffer: WebGLBuffer; + // Typed uniforms and attributes + #u_resolution: Uniform2f; + #u_time: Uniform1f; + #a_position: Attribute; + constructor(glContext: GLContext) { this.#glContext 
= glContext; const gl = glContext.gl; - this.#program = new ShaderProgram(gl, backgroundVertSource, backgroundFragSource); + this.#program = new Shader(gl, backgroundVertSource, backgroundFragSource); + + // Initialize typed uniforms and attributes + this.#u_resolution = this.#program.createUniform2f("u_resolution"); + this.#u_time = this.#program.createUniform1f("u_time"); + this.#a_position = this.#program.createAttribute("a_position"); const vao = gl.createVertexArray(); if (!vao) throw new Error("Failed to create VAO"); @@ -30,20 +40,14 @@ export class BackgroundRenderer { const gl = this.#glContext.gl; // Fullscreen quad vertices (clip space) - const positions = new Float32Array([ - -1.0, -1.0, - 1.0, -1.0, - -1.0, 1.0, - 1.0, 1.0, - ]); + const positions = new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]); gl.bindVertexArray(this.#vao); gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer); gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); - const posLoc = this.#program.getAttribute("a_position"); - gl.enableVertexAttribArray(posLoc); - gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); + gl.enableVertexAttribArray(this.#a_position.location); + gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0); gl.bindVertexArray(null); } @@ -53,8 +57,8 @@ export class BackgroundRenderer { const viewport = this.#glContext.viewport.peek(); this.#program.use(); - this.#program.setUniform2f("u_resolution", viewport.x, viewport.y); - this.#program.setUniform1f("u_time", now); + this.#u_resolution.set(viewport.x, viewport.y); + this.#u_time.set(now); gl.bindVertexArray(this.#vao); gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); diff --git a/app/src/room/gl/broadcast-renderer.ts b/app/src/room/gl/broadcast.ts similarity index 54% rename from app/src/room/gl/broadcast-renderer.ts rename to app/src/room/gl/broadcast.ts index 243cab53..f481a958 100644 --- a/app/src/room/gl/broadcast-renderer.ts +++ b/app/src/room/gl/broadcast.ts @@ -1,37 +1,69 @@ import type { Broadcast } from "../broadcast"; +import { Canvas } from "../canvas"; import type { Camera } from "./camera"; -import type { GLContext } from "./context"; -import { ShaderProgram } from "./shader"; -import broadcastVertSource from "./shaders/broadcast.vert?raw"; +import { Attribute, Shader, Uniform1f, Uniform1i, Uniform2f, Uniform4f, UniformMatrix4fv } from "./shader"; import broadcastFragSource from "./shaders/broadcast.frag?raw"; +import broadcastVertSource from "./shaders/broadcast.vert?raw"; export class BroadcastRenderer { - #glContext: GLContext; - #program: ShaderProgram; + #canvas: Canvas; + #program: Shader; #vao: WebGLVertexArrayObject; #positionBuffer: WebGLBuffer; #texCoordBuffer: WebGLBuffer; #indexBuffer: WebGLBuffer; - constructor(glContext: GLContext) { - this.#glContext = glContext; - const gl = glContext.gl; - - this.#program = new ShaderProgram(gl, broadcastVertSource, broadcastFragSource); - - const vao = gl.createVertexArray(); + // Typed uniforms + #u_projection: UniformMatrix4fv; + #u_bounds: Uniform4f; + #u_depth: Uniform1f; + #u_radius: Uniform1f; + #u_size: Uniform2f; + #u_opacity: Uniform1f; + #u_avatarTransition: Uniform1f; + #u_texture: Uniform1i; + #u_hasTexture: Uniform1i; + #u_avatarTexture: Uniform1i; + #u_hasAvatar: Uniform1i; + + // Typed attributes + #a_position: Attribute; + #a_texCoord: Attribute; + + constructor(canvas: Canvas) { + this.#canvas = canvas; + this.#program = new Shader(canvas.gl, broadcastVertSource, broadcastFragSource); + + // Initialize typed uniforms + 
this.#u_projection = this.#program.createUniformMatrix4fv("u_projection"); + this.#u_bounds = this.#program.createUniform4f("u_bounds"); + this.#u_depth = this.#program.createUniform1f("u_depth"); + this.#u_radius = this.#program.createUniform1f("u_radius"); + this.#u_size = this.#program.createUniform2f("u_size"); + this.#u_opacity = this.#program.createUniform1f("u_opacity"); + this.#u_avatarTransition = this.#program.createUniform1f("u_avatarTransition"); + this.#u_texture = this.#program.createUniform1i("u_texture"); + this.#u_hasTexture = this.#program.createUniform1i("u_hasTexture"); + this.#u_avatarTexture = this.#program.createUniform1i("u_avatarTexture"); + this.#u_hasAvatar = this.#program.createUniform1i("u_hasAvatar"); + + // Initialize typed attributes + this.#a_position = this.#program.createAttribute("a_position"); + this.#a_texCoord = this.#program.createAttribute("a_texCoord"); + + const vao = this.#canvas.gl.createVertexArray(); if (!vao) throw new Error("Failed to create VAO"); this.#vao = vao; - const positionBuffer = gl.createBuffer(); + const positionBuffer = this.#canvas.gl.createBuffer(); if (!positionBuffer) throw new Error("Failed to create position buffer"); this.#positionBuffer = positionBuffer; - const texCoordBuffer = gl.createBuffer(); + const texCoordBuffer = this.#canvas.gl.createBuffer(); if (!texCoordBuffer) throw new Error("Failed to create texCoord buffer"); this.#texCoordBuffer = texCoordBuffer; - const indexBuffer = gl.createBuffer(); + const indexBuffer = this.#canvas.gl.createBuffer(); if (!indexBuffer) throw new Error("Failed to create index buffer"); this.#indexBuffer = indexBuffer; @@ -39,7 +71,7 @@ export class BroadcastRenderer { } #setupBuffers() { - const gl = this.#glContext.gl; + const gl = this.#canvas.gl; // Quad vertices (0-1 range, will be scaled by bounds) const positions = new Float32Array([ @@ -73,16 +105,14 @@ export class BroadcastRenderer { // Position attribute gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer); gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); - const posLoc = this.#program.getAttribute("a_position"); - gl.enableVertexAttribArray(posLoc); - gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); + gl.enableVertexAttribArray(this.#a_position.location); + gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0); // TexCoord attribute gl.bindBuffer(gl.ARRAY_BUFFER, this.#texCoordBuffer); gl.bufferData(gl.ARRAY_BUFFER, texCoords, gl.STATIC_DRAW); - const texCoordLoc = this.#program.getAttribute("a_texCoord"); - gl.enableVertexAttribArray(texCoordLoc); - gl.vertexAttribPointer(texCoordLoc, 2, gl.FLOAT, false, 0, 0); + gl.enableVertexAttribArray(this.#a_texCoord.location); + gl.vertexAttribPointer(this.#a_texCoord.location, 2, gl.FLOAT, false, 0, 0); // Index buffer gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer); @@ -100,48 +130,48 @@ export class BroadcastRenderer { hovering?: boolean; }, ) { - const gl = this.#glContext.gl; + const gl = this.#canvas.gl; const bounds = broadcast.bounds.peek(); const scale = broadcast.zoom.peek(); this.#program.use(); // Set projection matrix - this.#program.setUniformMatrix4fv("u_projection", camera.projection); + this.#u_projection.set(camera.projection); // Set bounds (x, y, width, height) - this.#program.setUniform4f("u_bounds", bounds.position.x, bounds.position.y, bounds.size.x, bounds.size.y); + this.#u_bounds.set(bounds.position.x, bounds.position.y, bounds.size.x, bounds.size.y); // Set depth based on z-index const depth = 
camera.zToDepth(broadcast.position.peek().z, maxZ); - this.#program.setUniform1f("u_depth", depth); + this.#u_depth.set(depth); // Set radius for rounded corners const radius = 12 * scale; - this.#program.setUniform1f("u_radius", radius); + this.#u_radius.set(radius); // Set size for SDF calculation - this.#program.setUniform2f("u_size", bounds.size.x, bounds.size.y); + this.#u_size.set(bounds.size.x, bounds.size.y); // Set opacity let opacity = broadcast.video.online; if (modifiers?.dragging) { opacity *= 0.7; } - this.#program.setUniform1f("u_opacity", opacity); + this.#u_opacity.set(opacity); // Set avatar transition (0 = avatar, 1 = video) - this.#program.setUniform1f("u_avatarTransition", broadcast.video.avatarTransition); + this.#u_avatarTransition.set(broadcast.video.avatarTransition); // Bind video texture if available const texture = broadcast.video.texture; if (texture) { gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, texture); - this.#program.setUniform1i("u_texture", 0); - this.#program.setUniform1i("u_hasTexture", 1); + this.#u_texture.set(0); + this.#u_hasTexture.set(1); } else { - this.#program.setUniform1i("u_hasTexture", 0); + this.#u_hasTexture.set(0); } // Bind avatar texture if available @@ -149,10 +179,10 @@ export class BroadcastRenderer { if (avatarTexture) { gl.activeTexture(gl.TEXTURE1); gl.bindTexture(gl.TEXTURE_2D, avatarTexture); - this.#program.setUniform1i("u_avatarTexture", 1); - this.#program.setUniform1i("u_hasAvatar", 1); + this.#u_avatarTexture.set(1); + this.#u_hasAvatar.set(1); } else { - this.#program.setUniform1i("u_hasAvatar", 0); + this.#u_hasAvatar.set(0); } // Draw @@ -162,7 +192,7 @@ export class BroadcastRenderer { } cleanup() { - const gl = this.#glContext.gl; + const gl = this.#canvas.gl; gl.deleteVertexArray(this.#vao); gl.deleteBuffer(this.#positionBuffer); gl.deleteBuffer(this.#texCoordBuffer); diff --git a/app/src/room/gl/shader.ts b/app/src/room/gl/shader.ts index f9624515..b705af8d 100644 --- a/app/src/room/gl/shader.ts +++ b/app/src/room/gl/shader.ts @@ -1,13 +1,103 @@ -export class ShaderProgram { +// Typed uniform wrappers +export class Uniform1f { + #location: WebGLUniformLocation; + #gl: WebGL2RenderingContext; + + constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) { + this.#location = location; + this.#gl = gl; + } + + set(value: number) { + this.#gl.uniform1f(this.#location, value); + } +} + +export class Uniform2f { + #location: WebGLUniformLocation; + #gl: WebGL2RenderingContext; + + constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) { + this.#location = location; + this.#gl = gl; + } + + set(x: number, y: number) { + this.#gl.uniform2f(this.#location, x, y); + } +} + +export class Uniform3f { + #location: WebGLUniformLocation; + #gl: WebGL2RenderingContext; + + constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) { + this.#location = location; + this.#gl = gl; + } + + set(x: number, y: number, z: number) { + this.#gl.uniform3f(this.#location, x, y, z); + } +} + +export class Uniform4f { + #location: WebGLUniformLocation; + #gl: WebGL2RenderingContext; + + constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) { + this.#location = location; + this.#gl = gl; + } + + set(x: number, y: number, z: number, w: number) { + this.#gl.uniform4f(this.#location, x, y, z, w); + } +} + +export class Uniform1i { + #location: WebGLUniformLocation; + #gl: WebGL2RenderingContext; + + constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) { 
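+		// Integer uniforms are used to bind sampler texture units (see BroadcastRenderer).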
+		this.#location = location;
+		this.#gl = gl;
+	}
+
+	set(value: number) {
+		this.#gl.uniform1i(this.#location, value);
+	}
+}
+
+export class UniformMatrix4fv {
+	#location: WebGLUniformLocation;
+	#gl: WebGL2RenderingContext;
+
+	constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+		this.#location = location;
+		this.#gl = gl;
+	}
+
+	set(value: Float32Array) {
+		this.#gl.uniformMatrix4fv(this.#location, false, value);
+	}
+}
+
+// Typed attribute wrapper
+export class Attribute {
+	readonly location: number;
+
+	constructor(location: number) {
+		this.location = location;
+	}
+}
+
+export class Shader {
 	gl: WebGL2RenderingContext;
 	program: WebGLProgram;
-	uniforms: Map<string, WebGLUniformLocation>;
-	attributes: Map<string, number>;
 
 	constructor(gl: WebGL2RenderingContext, vertexSource: string, fragmentSource: string) {
 		this.gl = gl;
-		this.uniforms = new Map();
-		this.attributes = new Map();
 
 		const vertexShader = this.#compileShader(gl.VERTEX_SHADER, vertexSource);
 		const fragmentShader = this.#compileShader(gl.FRAGMENT_SHADER, fragmentSource);
@@ -57,54 +147,49 @@
 		this.gl.useProgram(this.program);
 	}
 
-	getUniform(name: string): WebGLUniformLocation {
-		let location = this.uniforms.get(name);
-		if (location === undefined) {
-			const loc = this.gl.getUniformLocation(this.program, name);
-			if (!loc) {
-				throw new Error(`Uniform ${name} not found`);
-			}
-			this.uniforms.set(name, loc);
-			location = loc;
+	#getUniform(name: string): WebGLUniformLocation {
+		const loc = this.gl.getUniformLocation(this.program, name);
+		if (!loc) {
+			throw new Error(`Uniform ${name} not found`);
 		}
-		return location;
-	}
-
-	getAttribute(name: string): number {
-		let location = this.attributes.get(name);
-		if (location === undefined) {
-			const loc = this.gl.getAttribLocation(this.program, name);
-			if (loc === -1) {
-				throw new Error(`Attribute ${name} not found`);
-			}
-			this.attributes.set(name, loc);
-			location = loc;
+		return loc;
+	}
+
+	#getAttribute(name: string): number {
+		const loc = this.gl.getAttribLocation(this.program, name);
+		if (loc === -1) {
+			throw new Error(`Attribute ${name} not found`);
 		}
-		return location;
+		return loc;
+	}
+
+	// Typed wrapper factory methods
+	createUniform1f(name: string): Uniform1f {
+		return new Uniform1f(this.#getUniform(name), this.gl);
 	}
 
-	setUniform1f(name: string, value: number) {
-		this.gl.uniform1f(this.getUniform(name), value);
+	createUniform2f(name: string): Uniform2f {
+		return new Uniform2f(this.#getUniform(name), this.gl);
 	}
 
-	setUniform2f(name: string, x: number, y: number) {
-		this.gl.uniform2f(this.getUniform(name), x, y);
+	createUniform3f(name: string): Uniform3f {
+		return new Uniform3f(this.#getUniform(name), this.gl);
 	}
 
-	setUniform3f(name: string, x: number, y: number, z: number) {
-		this.gl.uniform3f(this.getUniform(name), x, y, z);
+	createUniform4f(name: string): Uniform4f {
+		return new Uniform4f(this.#getUniform(name), this.gl);
 	}
 
-	setUniform4f(name: string, x: number, y: number, z: number, w: number) {
-		this.gl.uniform4f(this.getUniform(name), x, y, z, w);
+	createUniform1i(name: string): Uniform1i {
+		return new Uniform1i(this.#getUniform(name), this.gl);
 	}
 
-	setUniform1i(name: string, value: number) {
-		this.gl.uniform1i(this.getUniform(name), value);
+	createUniformMatrix4fv(name: string): UniformMatrix4fv {
+		return new UniformMatrix4fv(this.#getUniform(name), this.gl);
 	}
 
-	setUniformMatrix4fv(name: string, value: Float32Array) {
-		this.gl.uniformMatrix4fv(this.getUniform(name), false, value);
+	createAttribute(name: string): Attribute
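+	// Usage sketch (hypothetical render-loop code, not part of this patch):
+	// resolving locations once at construction keeps string lookups off the
+	// per-frame path and makes a missing uniform fail loudly at startup:
+	//
+	//   const shader = new Shader(gl, vertSrc, fragSrc);
+	//   const u_opacity = shader.createUniform1f("u_opacity"); // throws if absent
+	//   const a_position = shader.createAttribute("a_position");
+	//   gl.enableVertexAttribArray(a_position.location);
+	//   shader.use();
+	//   u_opacity.set(0.5); // hot path: no getUniformLocation call per frame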
{ + return new Attribute(this.#getAttribute(name)); } cleanup() { diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 3748f88f..75f86224 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -3,7 +3,7 @@ import { Effect, Signal } from "@kixelated/signals"; import { Broadcast, BroadcastSource } from "./broadcast"; import type { Canvas } from "./canvas"; import { Vector } from "./geometry"; -import { BroadcastRenderer } from "./gl/broadcast-renderer"; +import { BroadcastRenderer } from "./gl/broadcast"; import type { Sound } from "./sound"; export type SpaceProps = { @@ -47,7 +47,7 @@ export class Space { this.profile = props?.profile ?? false; // Initialize WebGL renderer - this.#broadcastRenderer = new BroadcastRenderer(canvas.gl); + this.#broadcastRenderer = new BroadcastRenderer(canvas); // Use the new eventListener helper that automatically handles cleanup this.#signals.event(canvas.element, "mousedown", this.#onMouseDown.bind(this)); @@ -398,9 +398,6 @@ export class Space { add(id: string, source: BroadcastSource): Broadcast { const broadcast = new Broadcast({ source, canvas: this.canvas, sound: this.sound, scale: this.#scale }); - // Set GL context for video texture uploads - broadcast.video.setGLContext(this.canvas.gl.gl); - // Put new broadcasts on top of the stack. // NOTE: This is not sent over the network. broadcast.position.update((prev) => ({ diff --git a/app/src/room/video.ts b/app/src/room/video.ts index b9cae4ea..d7a5b996 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -35,31 +35,27 @@ export class Video { #nameOpacity = 0; // WebGL textures for this broadcast - texture?: WebGLTexture; // Video texture - avatarTexture?: WebGLTexture; // Avatar texture - #gl?: WebGL2RenderingContext; + texture: WebGLTexture; // Video texture + avatarTexture: WebGLTexture; // Avatar texture + #gl: WebGL2RenderingContext; constructor(broadcast: Broadcast) { this.broadcast = broadcast; - this.broadcast.signals.effect(this.#runAvatar.bind(this)); - this.broadcast.signals.effect(this.#runTargetSize.bind(this)); - this.broadcast.signals.effect(this.#runFrame.bind(this)); - } - setGLContext(gl: WebGL2RenderingContext) { - this.#gl = gl; + this.#gl = broadcast.canvas.gl; // Create the textures - this.texture = gl.createTexture(); - if (!this.texture) throw new Error("Failed to create video texture"); - - this.avatarTexture = gl.createTexture(); - if (!this.avatarTexture) throw new Error("Failed to create avatar texture"); + this.texture = this.#gl.createTexture(); + this.avatarTexture = this.#gl.createTexture(); // Set up texture upload effects this.broadcast.signals.effect(this.#uploadVideoTexture.bind(this)); this.broadcast.signals.effect(this.#uploadAvatarTexture.bind(this)); // TODO: Add meme texture upload effect + + this.broadcast.signals.effect(this.#runAvatar.bind(this)); + this.broadcast.signals.effect(this.#runTargetSize.bind(this)); + this.broadcast.signals.effect(this.#runFrame.bind(this)); } #runAvatar(effect: Effect) { @@ -77,7 +73,7 @@ export class Video { // For SVGs, load at higher resolution to avoid pixelation // Set a reasonable size (e.g., 512x512) for better quality - if (avatar.endsWith('.svg')) { + if (avatar.endsWith(".svg")) { newAvatar.width = 512; newAvatar.height = 512; } @@ -86,7 +82,9 @@ export class Video { const load = () => { this.avatar = newAvatar; - this.avatarSize.set(Vector.create(newAvatar.naturalWidth || newAvatar.width, newAvatar.naturalHeight || newAvatar.height)); + this.avatarSize.set( + 
Vector.create(newAvatar.naturalWidth || newAvatar.width, newAvatar.naturalHeight || newAvatar.height), + ); // Upload avatar texture after it loads if (this.#gl && this.avatarTexture) { @@ -187,7 +185,7 @@ export class Video { } #uploadVideoFrameToTexture(frame: VideoFrame) { - const gl = this.#gl!; + const gl = this.#gl; gl.bindTexture(gl.TEXTURE_2D, this.texture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); @@ -198,7 +196,7 @@ export class Video { } #uploadVideoElementToTexture(video: HTMLVideoElement) { - const gl = this.#gl!; + const gl = this.#gl; gl.bindTexture(gl.TEXTURE_2D, this.texture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); @@ -209,7 +207,7 @@ export class Video { } #uploadImageToAvatarTexture(image: HTMLImageElement) { - const gl = this.#gl!; + const gl = this.#gl; gl.bindTexture(gl.TEXTURE_2D, this.avatarTexture); gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image); gl.generateMipmap(gl.TEXTURE_2D); @@ -240,16 +238,8 @@ export class Video { } close() { - if (this.#gl) { - if (this.texture) { - this.#gl.deleteTexture(this.texture); - this.texture = undefined; - } - if (this.avatarTexture) { - this.#gl.deleteTexture(this.avatarTexture); - this.avatarTexture = undefined; - } - } + this.#gl.deleteTexture(this.texture); + this.#gl.deleteTexture(this.avatarTexture); } // TODO: Rendering is now handled by WebGL in space.ts From 4efa89e373ce732f9e1c75bfb1c486f22a4df7dc Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 12:01:16 -0700 Subject: [PATCH 03/19] Initial meme support. --- app/src/room/gl/broadcast.ts | 96 ++++++++++++- app/src/room/gl/shaders/broadcast.frag | 27 +++- app/src/room/video.ts | 181 +++++++++++-------------- 3 files changed, 200 insertions(+), 104 deletions(-) diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index f481a958..02a2a54d 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -1,5 +1,6 @@ import type { Broadcast } from "../broadcast"; import { Canvas } from "../canvas"; +import { MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "../meme"; import type { Camera } from "./camera"; import { Attribute, Shader, Uniform1f, Uniform1i, Uniform2f, Uniform4f, UniformMatrix4fv } from "./shader"; import broadcastFragSource from "./shaders/broadcast.frag?raw"; @@ -25,6 +26,10 @@ export class BroadcastRenderer { #u_hasTexture: Uniform1i; #u_avatarTexture: Uniform1i; #u_hasAvatar: Uniform1i; + #u_memeTexture: Uniform1i; + #u_hasMeme: Uniform1i; + #u_memeOpacity: Uniform1f; + #u_memeBounds: Uniform4f; // Typed attributes #a_position: Attribute; @@ -46,6 +51,10 @@ export class BroadcastRenderer { this.#u_hasTexture = this.#program.createUniform1i("u_hasTexture"); this.#u_avatarTexture = this.#program.createUniform1i("u_avatarTexture"); this.#u_hasAvatar = this.#program.createUniform1i("u_hasAvatar"); + this.#u_memeTexture = this.#program.createUniform1i("u_memeTexture"); + this.#u_hasMeme = this.#program.createUniform1i("u_hasMeme"); + this.#u_memeOpacity = this.#program.createUniform1f("u_memeOpacity"); + this.#u_memeBounds = this.#program.createUniform4f("u_memeBounds"); // Initialize typed attributes this.#a_position = this.#program.createAttribute("a_position"); @@ -164,7 +173,7 @@ export class BroadcastRenderer { 
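The hunk below sizes the meme overlay with CSS-style `contain`/`cover` semantics and hands the result to the fragment shader as `u_memeBounds`. A minimal sketch of that aspect-ratio math as a pure function (the name `fitRect` is illustrative, not part of the patch):

```ts
// Extent of the overlay relative to the quad, in 0-1 texture units.
// "contain" letterboxes the video inside the quad; "cover" fills and crops.
function fitRect(
	videoAspect: number, // meme.videoWidth / meme.videoHeight
	quadAspect: number, // bounds.size.x / bounds.size.y
	fit: "contain" | "cover",
): { width: number; height: number } {
	const wider = videoAspect > quadAspect;
	// "contain" shrinks the constrained axis below 1; "cover" pushes it past 1.
	if (fit === "contain" ? wider : !wider) {
		return { width: 1, height: quadAspect / videoAspect };
	}
	return { width: videoAspect / quadAspect, height: 1 };
}
```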
this.#u_avatarTransition.set(broadcast.video.avatarTransition); // Bind video texture if available - const texture = broadcast.video.texture; + const texture = broadcast.video.webcamTexture; if (texture) { gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, texture); @@ -185,6 +194,91 @@ export class BroadcastRenderer { this.#u_hasAvatar.set(0); } + // Bind meme texture if available + const meme = broadcast.meme.peek(); + const memeTexture = broadcast.video.memeTexture; + if (meme instanceof HTMLVideoElement && memeTexture && meme.readyState >= meme.HAVE_CURRENT_DATA) { + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, memeTexture); + this.#u_memeTexture.set(2); + this.#u_hasMeme.set(1); + this.#u_memeOpacity.set(broadcast.video.memeOpacity); + + // Get meme configuration + const memeName = broadcast.memeName.peek(); + let fit: "contain" | "cover" = "cover"; + let position = "center"; + + if (memeName) { + const lookupKey = memeName.toLowerCase().replace(/-/g, ""); + const memeKey = MEME_VIDEO_LOOKUP[lookupKey] || memeName; + const memeData = MEME_VIDEO[memeKey as MemeVideoName]; + if (memeData) { + fit = memeData.fit || "cover"; + position = memeData.position || "center"; + } + } + + // Calculate meme bounds based on fit and position + const aspectRatio = meme.videoWidth / meme.videoHeight; + const boundsAspectRatio = bounds.size.x / bounds.size.y; + let width: number; + let height: number; + + if (fit === "contain") { + // Fit entire video within bounds + if (aspectRatio > boundsAspectRatio) { + width = 1.0; + height = boundsAspectRatio / aspectRatio; + } else { + height = 1.0; + width = aspectRatio / boundsAspectRatio; + } + } else { + // cover: fill the bounds (may crop) + if (aspectRatio > boundsAspectRatio) { + height = 1.0; + width = aspectRatio / boundsAspectRatio; + } else { + width = 1.0; + height = boundsAspectRatio / aspectRatio; + } + } + + // Parse position string + let xPos = 0.5; + let yPos = 0.5; + + const positionParts = position.toLowerCase().split(/\s+/); + for (const part of positionParts) { + if (part === "left") xPos = 0; + else if (part === "right") xPos = 1; + else if (part === "top") yPos = 0; + else if (part === "bottom") yPos = 1; + else if (part === "center") { + // Keep defaults + } else if (part.endsWith("%")) { + const value = parseFloat(part) / 100; + if (positionParts.length === 1) { + xPos = value; + } else if (positionParts.indexOf(part) === 0) { + xPos = value; + } else { + yPos = value; + } + } + } + + // Calculate offset in texture coordinates (0-1 range) + const x = (1.0 - width) * xPos; + const y = (1.0 - height) * yPos; + + // Set memeBounds as (x_offset, y_offset, width_scale, height_scale) + this.#u_memeBounds.set(x, y, width, height); + } else { + this.#u_hasMeme.set(0); + } + // Draw gl.bindVertexArray(this.#vao); gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0); diff --git a/app/src/room/gl/shaders/broadcast.frag b/app/src/room/gl/shaders/broadcast.frag index e7e9e80b..91961889 100644 --- a/app/src/room/gl/shaders/broadcast.frag +++ b/app/src/room/gl/shaders/broadcast.frag @@ -6,12 +6,16 @@ in vec2 v_pos; uniform sampler2D u_texture; uniform sampler2D u_avatarTexture; +uniform sampler2D u_memeTexture; uniform bool u_hasTexture; uniform bool u_hasAvatar; +uniform bool u_hasMeme; uniform float u_radius; uniform vec2 u_size; uniform float u_opacity; uniform float u_avatarTransition; // 0 = avatar, 1 = video +uniform float u_memeOpacity; +uniform vec4 u_memeBounds; // x, y, width, height in texture coordinates out 
vec4 fragColor; @@ -41,7 +45,26 @@ void main() { vec4 avatarColor = u_hasAvatar ? texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); // Blend between avatar and video based on transition - vec4 texColor = mix(avatarColor, videoColor, u_avatarTransition); + vec4 baseColor = mix(avatarColor, videoColor, u_avatarTransition); - fragColor = vec4(texColor.rgb, texColor.a * alpha * u_opacity); + // Apply meme overlay if present + if (u_hasMeme && u_memeOpacity > 0.0) { + // Calculate the meme texture coordinates based on memeBounds + // memeBounds contains the x, y offset and width, height scaling + vec2 memeTexCoord = (v_texCoord - u_memeBounds.xy) / u_memeBounds.zw; + + // Only sample if we're within the meme bounds + if (memeTexCoord.x >= 0.0 && memeTexCoord.x <= 1.0 && + memeTexCoord.y >= 0.0 && memeTexCoord.y <= 1.0) { + vec4 memeColor = texture(u_memeTexture, memeTexCoord); + + // Blend meme on top using alpha compositing + // The meme uses WebM+VP9 with alpha channel for transparency + float memeAlpha = memeColor.a * u_memeOpacity; + baseColor.rgb = mix(baseColor.rgb, memeColor.rgb, memeAlpha); + baseColor.a = max(baseColor.a, memeAlpha); + } + } + + fragColor = vec4(baseColor.rgb, baseColor.a * alpha * u_opacity); } diff --git a/app/src/room/video.ts b/app/src/room/video.ts index d7a5b996..54ea2dde 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -6,7 +6,7 @@ import { FakeBroadcast } from "./fake"; import { Vector } from "./geometry"; import { MEME_AUDIO, MEME_AUDIO_LOOKUP, MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "./meme"; -export type VideoSource = Watch.Video.Source | Publish.Video.Encoder; +//export type VideoSource = Watch.Video.Source | Publish.Video.Encoder; export class Video { // We don't use the Video renderer that comes with hang because it assumes a single video source. @@ -22,9 +22,6 @@ export class Video { // The size of the avatar in pixels. avatarSize = new Signal(undefined); - // The current video frame. - frame?: CanvasImageSource; - // The desired size of the video in pixels. targetSize = new Signal(Vector.create(128, 128)); @@ -35,8 +32,9 @@ export class Video { #nameOpacity = 0; // WebGL textures for this broadcast - texture: WebGLTexture; // Video texture + webcamTexture: WebGLTexture; // Video texture avatarTexture: WebGLTexture; // Avatar texture + memeTexture: WebGLTexture; // Meme texture #gl: WebGL2RenderingContext; constructor(broadcast: Broadcast) { @@ -45,17 +43,15 @@ export class Video { this.#gl = broadcast.canvas.gl; // Create the textures - this.texture = this.#gl.createTexture(); + this.webcamTexture = this.#gl.createTexture(); this.avatarTexture = this.#gl.createTexture(); + this.memeTexture = this.#gl.createTexture(); // Set up texture upload effects - this.broadcast.signals.effect(this.#uploadVideoTexture.bind(this)); - this.broadcast.signals.effect(this.#uploadAvatarTexture.bind(this)); - // TODO: Add meme texture upload effect - + this.broadcast.signals.effect(this.#runWebcam.bind(this)); + this.broadcast.signals.effect(this.#runMeme.bind(this)); this.broadcast.signals.effect(this.#runAvatar.bind(this)); this.broadcast.signals.effect(this.#runTargetSize.bind(this)); - this.broadcast.signals.effect(this.#runFrame.bind(this)); } #runAvatar(effect: Effect) { @@ -74,29 +70,30 @@ export class Video { // For SVGs, load at higher resolution to avoid pixelation // Set a reasonable size (e.g., 512x512) for better quality if (avatar.endsWith(".svg")) { + // TODO Automatically adjust? 
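+			// (Assumed browser behavior) An SVG with no intrinsic pixel size is
+			// rasterized at the element's width/height when uploaded through
+			// texImage2D, so presetting 512x512 below yields a sharper bitmap.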
newAvatar.width = 512; newAvatar.height = 512; } newAvatar.src = avatar; - const load = () => { - this.avatar = newAvatar; - this.avatarSize.set( - Vector.create(newAvatar.naturalWidth || newAvatar.width, newAvatar.naturalHeight || newAvatar.height), - ); - - // Upload avatar texture after it loads - if (this.#gl && this.avatarTexture) { - try { - this.#uploadImageToAvatarTexture(this.avatar); - } catch (err) { - console.error("Failed to upload avatar texture on load:", err); - } - } - }; + // Once the avatar loads, upload it to the texture + effect.event(newAvatar, "load", this.#uploadAvatar.bind(this, newAvatar)); + } + + #uploadAvatar(avatar: HTMLImageElement) { + this.avatar = avatar; + this.avatarSize.set(Vector.create(avatar.naturalWidth || avatar.width, avatar.naturalHeight || avatar.height)); - effect.event(newAvatar, "load", load); + const gl = this.#gl; + gl.bindTexture(gl.TEXTURE_2D, this.avatarTexture); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, avatar); + gl.generateMipmap(gl.TEXTURE_2D); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); + gl.bindTexture(gl.TEXTURE_2D, null); } #runTargetSize(effect: Effect) { @@ -124,70 +121,33 @@ export class Video { this.targetSize.set(Vector.create(128, 128)); } - #runFrame(effect: Effect) { + #runWebcam(effect: Effect) { if (this.broadcast.source instanceof FakeBroadcast) { // TODO FakeBroadcast should return a VideoFrame instead of a HTMLVideoElement. - this.frame = effect.get(this.broadcast.source.video.frame); + const video = effect.get(this.broadcast.source.video.frame); + if (!video) return; + this.#videoToTexture(effect, video, this.webcamTexture); } else { - const frame = effect.get(this.broadcast.source.video.frame)?.clone(); - effect.cleanup(() => frame?.close()); - this.frame = frame; + const frame = effect.get(this.broadcast.source.video.frame); + if (!frame) return; + this.#frameToTexture(frame, this.webcamTexture); } } - // Effect: Upload video frame to texture when it changes - #uploadVideoTexture(effect: Effect) { - if (!this.#gl || !this.texture) return; + #runMeme(effect: Effect) { + const meme = effect.get(this.broadcast.meme); + if (!meme) return; - // Listen to the actual video frame signal from the source - let frame: VideoFrame | HTMLVideoElement | undefined; + // Only handle video memes (audio memes are just sound effects) + if (!(meme instanceof HTMLVideoElement)) return; - if (this.broadcast.source instanceof FakeBroadcast) { - frame = effect.get(this.broadcast.source.video.frame); - } else { - frame = effect.get(this.broadcast.source.video.frame); - } - - if (!frame) { - // No video frame available - return; - } - - try { - if (frame instanceof VideoFrame) { - this.#uploadVideoFrameToTexture(frame); - } else if (frame instanceof HTMLVideoElement) { - this.#uploadVideoElementToTexture(frame); - } - } catch (err) { - console.error("Failed to upload video texture:", err); - } - } - - // Effect: Upload avatar to texture when it loads - #uploadAvatarTexture(effect: Effect) { - if (!this.#gl || !this.avatarTexture) return; - - // Listen to the avatar signal - const avatarUrl = effect.get(this.broadcast.source.user.avatar); - if (!avatarUrl) return; - - // Wait for the image to load - if (!this.avatar.complete || this.avatar.src !== avatarUrl) { - 
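		// A polling-free alternative to this completeness check (sketch;
		// `uploadAvatar` is a hypothetical helper): HTMLImageElement.decode()
		// resolves once pixel data is ready, much like the "load" effect above:
		//
		//   const img = new Image();
		//   img.src = avatarUrl;
		//   img.decode().then(() => uploadAvatar(img)); // one-shot upload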
return; - } - - try { - this.#uploadImageToAvatarTexture(this.avatar); - } catch (err) { - console.error("Failed to upload avatar texture:", err); - } + this.#videoToTexture(effect, meme, this.memeTexture); } - #uploadVideoFrameToTexture(frame: VideoFrame) { + #frameToTexture(src: VideoFrame, dst: WebGLTexture) { const gl = this.#gl; - gl.bindTexture(gl.TEXTURE_2D, this.texture); - gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame); + gl.bindTexture(gl.TEXTURE_2D, dst); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, src); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); @@ -195,31 +155,31 @@ export class Video { gl.bindTexture(gl.TEXTURE_2D, null); } - #uploadVideoElementToTexture(video: HTMLVideoElement) { + #videoToTexture(effect: Effect, src: HTMLVideoElement, dst: WebGLTexture) { const gl = this.#gl; - gl.bindTexture(gl.TEXTURE_2D, this.texture); - gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); - gl.bindTexture(gl.TEXTURE_2D, null); - } - #uploadImageToAvatarTexture(image: HTMLImageElement) { - const gl = this.#gl; - gl.bindTexture(gl.TEXTURE_2D, this.avatarTexture); - gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image); - gl.generateMipmap(gl.TEXTURE_2D); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); - gl.bindTexture(gl.TEXTURE_2D, null); + let cancel: number; + const onFrame = () => { + gl.bindTexture(gl.TEXTURE_2D, dst); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, src); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); + gl.bindTexture(gl.TEXTURE_2D, null); + + if (!src.paused && !src.ended) { + cancel = src.requestVideoFrameCallback(onFrame); + } + }; + + cancel = src.requestVideoFrameCallback(onFrame); + + effect.cleanup(() => src.cancelVideoFrameCallback(cancel)); } tick() { - if (this.frame) { + if (this.broadcast.source.video.frame.peek()) { this.avatarTransition = Math.min(this.avatarTransition + 0.05, 1); } else { this.avatarTransition = Math.max(this.avatarTransition - 0.05, 0); @@ -231,15 +191,34 @@ export class Video { this.online += (0 - this.online) * 0.1; } + // Update meme opacity + const meme = this.broadcast.meme.peek(); + if (meme) { + if (meme.ended || (meme.paused && meme.currentTime > 0)) { + this.#memeOpacity += -this.#memeOpacity * 0.1; + if (this.#memeOpacity <= 0) { + this.broadcast.meme.set(undefined); + this.broadcast.memeName.set(undefined); + } + } else { + this.#memeOpacity += (1 - this.#memeOpacity) * 0.1; + } + } + /* const ZOOM_SPEED = 0.005; this.#zoom = this.#zoom.lerp(this.#zoomTarget, ZOOM_SPEED); */ } + get 
memeOpacity(): number { + return this.#memeOpacity; + } + close() { - this.#gl.deleteTexture(this.texture); + this.#gl.deleteTexture(this.webcamTexture); this.#gl.deleteTexture(this.avatarTexture); + this.#gl.deleteTexture(this.memeTexture); } // TODO: Rendering is now handled by WebGL in space.ts From d1034d724a421a327db20f63e43092314186139b Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 12:06:02 -0700 Subject: [PATCH 04/19] Meme stuff. --- app/src/room/gl/broadcast.ts | 75 ++---------------------------- app/src/room/video.ts | 88 ++++++++++++++++++++++++++++++++++++ 2 files changed, 91 insertions(+), 72 deletions(-) diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 02a2a54d..7b81230e 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -1,6 +1,5 @@ import type { Broadcast } from "../broadcast"; import { Canvas } from "../canvas"; -import { MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "../meme"; import type { Camera } from "./camera"; import { Attribute, Shader, Uniform1f, Uniform1i, Uniform2f, Uniform4f, UniformMatrix4fv } from "./shader"; import broadcastFragSource from "./shaders/broadcast.frag?raw"; @@ -204,77 +203,9 @@ export class BroadcastRenderer { this.#u_hasMeme.set(1); this.#u_memeOpacity.set(broadcast.video.memeOpacity); - // Get meme configuration - const memeName = broadcast.memeName.peek(); - let fit: "contain" | "cover" = "cover"; - let position = "center"; - - if (memeName) { - const lookupKey = memeName.toLowerCase().replace(/-/g, ""); - const memeKey = MEME_VIDEO_LOOKUP[lookupKey] || memeName; - const memeData = MEME_VIDEO[memeKey as MemeVideoName]; - if (memeData) { - fit = memeData.fit || "cover"; - position = memeData.position || "center"; - } - } - - // Calculate meme bounds based on fit and position - const aspectRatio = meme.videoWidth / meme.videoHeight; - const boundsAspectRatio = bounds.size.x / bounds.size.y; - let width: number; - let height: number; - - if (fit === "contain") { - // Fit entire video within bounds - if (aspectRatio > boundsAspectRatio) { - width = 1.0; - height = boundsAspectRatio / aspectRatio; - } else { - height = 1.0; - width = aspectRatio / boundsAspectRatio; - } - } else { - // cover: fill the bounds (may crop) - if (aspectRatio > boundsAspectRatio) { - height = 1.0; - width = aspectRatio / boundsAspectRatio; - } else { - width = 1.0; - height = boundsAspectRatio / aspectRatio; - } - } - - // Parse position string - let xPos = 0.5; - let yPos = 0.5; - - const positionParts = position.toLowerCase().split(/\s+/); - for (const part of positionParts) { - if (part === "left") xPos = 0; - else if (part === "right") xPos = 1; - else if (part === "top") yPos = 0; - else if (part === "bottom") yPos = 1; - else if (part === "center") { - // Keep defaults - } else if (part.endsWith("%")) { - const value = parseFloat(part) / 100; - if (positionParts.length === 1) { - xPos = value; - } else if (positionParts.indexOf(part) === 0) { - xPos = value; - } else { - yPos = value; - } - } - } - - // Calculate offset in texture coordinates (0-1 range) - const x = (1.0 - width) * xPos; - const y = (1.0 - height) * yPos; - - // Set memeBounds as (x_offset, y_offset, width_scale, height_scale) - this.#u_memeBounds.set(x, y, width, height); + // Use pre-computed meme bounds from Video class + const memeBounds = broadcast.video.memeBounds; + this.#u_memeBounds.set(memeBounds.x, memeBounds.y, memeBounds.width, memeBounds.height); } else { this.#u_hasMeme.set(0); } diff --git 
a/app/src/room/video.ts b/app/src/room/video.ts index 54ea2dde..a4174bef 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -31,6 +31,9 @@ export class Video { #memeOpacity = 0; #nameOpacity = 0; + // Cached meme bounds (x_offset, y_offset, width_scale, height_scale) + memeBounds = { x: 0, y: 0, width: 1, height: 1 }; + // WebGL textures for this broadcast webcamTexture: WebGLTexture; // Video texture avatarTexture: WebGLTexture; // Avatar texture @@ -50,6 +53,7 @@ export class Video { // Set up texture upload effects this.broadcast.signals.effect(this.#runWebcam.bind(this)); this.broadcast.signals.effect(this.#runMeme.bind(this)); + this.broadcast.signals.effect(this.#runMemeBounds.bind(this)); this.broadcast.signals.effect(this.#runAvatar.bind(this)); this.broadcast.signals.effect(this.#runTargetSize.bind(this)); } @@ -144,6 +148,90 @@ export class Video { this.#videoToTexture(effect, meme, this.memeTexture); } + #runMemeBounds(effect: Effect) { + const meme = effect.get(this.broadcast.meme); + if (!meme || !(meme instanceof HTMLVideoElement)) { + return; + } + + // Also react to bounds changes + const bounds = effect.get(this.broadcast.bounds); + + // Wait until video metadata is loaded + if (meme.videoWidth === 0 || meme.videoHeight === 0) return; + + // Get meme configuration + const memeName = effect.get(this.broadcast.memeName); + let fit: "contain" | "cover" = "cover"; + let position = "center"; + + if (memeName) { + const lookupKey = memeName.toLowerCase().replace(/-/g, ""); + const memeKey = MEME_VIDEO_LOOKUP[lookupKey] || memeName; + const memeData = MEME_VIDEO[memeKey as MemeVideoName]; + if (memeData) { + fit = memeData.fit || "cover"; + position = memeData.position || "center"; + } + } + + // Calculate meme bounds based on fit and position + const aspectRatio = meme.videoWidth / meme.videoHeight; + const boundsAspectRatio = bounds.size.x / bounds.size.y; + let width: number; + let height: number; + + if (fit === "contain") { + // Fit entire video within bounds + if (aspectRatio > boundsAspectRatio) { + width = 1.0; + height = boundsAspectRatio / aspectRatio; + } else { + height = 1.0; + width = aspectRatio / boundsAspectRatio; + } + } else { + // cover: fill the bounds (may crop) + if (aspectRatio > boundsAspectRatio) { + height = 1.0; + width = aspectRatio / boundsAspectRatio; + } else { + width = 1.0; + height = boundsAspectRatio / aspectRatio; + } + } + + // Parse position string + let xPos = 0.5; + let yPos = 0.5; + + const positionParts = position.toLowerCase().split(/\s+/); + for (const part of positionParts) { + if (part === "left") xPos = 0; + else if (part === "right") xPos = 1; + else if (part === "top") yPos = 0; + else if (part === "bottom") yPos = 1; + else if (part === "center") { + // Keep defaults + } else if (part.endsWith("%")) { + const value = parseFloat(part) / 100; + if (positionParts.length === 1) { + xPos = value; + } else if (positionParts.indexOf(part) === 0) { + xPos = value; + } else { + yPos = value; + } + } + } + + // Calculate offset in texture coordinates (0-1 range) + this.memeBounds.x = (1.0 - width) * xPos; + this.memeBounds.y = (1.0 - height) * yPos; + this.memeBounds.width = width; + this.memeBounds.height = height; + } + #frameToTexture(src: VideoFrame, dst: WebGLTexture) { const gl = this.#gl; gl.bindTexture(gl.TEXTURE_2D, dst); From 7d62b2ff589cafb20372f52e7aac00fa98541b5b Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 13:52:25 -0700 Subject: [PATCH 05/19] WIP --- app/src/room/audio.ts | 68 ++++++---- 
app/src/room/broadcast.ts | 1 + app/src/room/gl/border.ts | 143 ++++++++++++++++++++ app/src/room/gl/broadcast.ts | 9 +- app/src/room/gl/camera.ts | 5 +- app/src/room/gl/outline.ts | 194 +++++++++++++++++++++++++++ app/src/room/gl/shaders/border.frag | 50 +++++++ app/src/room/gl/shaders/border.vert | 19 +++ app/src/room/gl/shaders/outline.frag | 102 ++++++++++++++ app/src/room/gl/shaders/outline.vert | 19 +++ app/src/room/space.ts | 39 ++++-- app/src/room/video.ts | 14 +- 12 files changed, 615 insertions(+), 48 deletions(-) create mode 100644 app/src/room/gl/border.ts create mode 100644 app/src/room/gl/outline.ts create mode 100644 app/src/room/gl/shaders/border.frag create mode 100644 app/src/room/gl/shaders/border.vert create mode 100644 app/src/room/gl/shaders/outline.frag create mode 100644 app/src/room/gl/shaders/outline.vert diff --git a/app/src/room/audio.ts b/app/src/room/audio.ts index 02e86ba7..b97aefca 100644 --- a/app/src/room/audio.ts +++ b/app/src/room/audio.ts @@ -29,8 +29,8 @@ export class Audio { #volumeSmoothed = 0; - #speaking = false; - #speakingAlpha = 0; + // Public volume for visualization (0 to 1) + volume = 0; #signals = new Effect(); @@ -98,12 +98,6 @@ export class Audio { if (!(this.broadcast.source instanceof Publish.Broadcast)) { this.#signals.effect(this.#runOutput.bind(this)); } - - // Track speaking state from publish broadcast - this.#signals.effect((effect) => { - const speaking = effect.get(this.broadcast.source.audio.speaking.active); - this.#speaking = speaking ?? false; - }); } #runOutput(effect: Effect) { @@ -134,26 +128,44 @@ export class Audio { } } - // TODO: Audio visualization will be implemented with WebGL shaders - // renderBackground(ctx: CanvasRenderingContext2D) { - // // Black background outline - // } - - // render(ctx: CanvasRenderingContext2D) { - // // Audio visualization with colored fill based on volume - // } - - #roundedRectPath(ctx: CanvasRenderingContext2D, x: number, y: number, w: number, h: number, r: number) { - const maxR = Math.min(r, w / 2, h / 2); - ctx.moveTo(x + maxR, y); - ctx.lineTo(x + w - maxR, y); - ctx.quadraticCurveTo(x + w, y, x + w, y + maxR); - ctx.lineTo(x + w, y + h - maxR); - ctx.quadraticCurveTo(x + w, y + h, x + w - maxR, y + h); - ctx.lineTo(x + maxR, y + h); - ctx.quadraticCurveTo(x, y + h, x, y + h - maxR); - ctx.lineTo(x, y + maxR); - ctx.quadraticCurveTo(x, y, x + maxR, y); + tick() { + // Get audio from the notification/meme context + const soundBuffer = this.sound.analyze(); + if (!soundBuffer) { + this.volume *= 0.95; // Fade out when no analyser + return; + } + + // Take the absolute value of the distance from 128 (silence) + for (let i = 0; i < soundBuffer.length; i++) { + soundBuffer[i] = Math.abs(soundBuffer[i] - 128); + } + + // If the broadcast audio is playing, combine the buffers + if (this.#analyser) { + if (this.#analyserBuffer.length !== soundBuffer.length) { + throw new Error("analyser buffer length mismatch"); + } + + this.#analyser.getByteTimeDomainData(this.#analyserBuffer); + for (let i = 0; i < this.#analyserBuffer.length; i++) { + soundBuffer[i] += Math.abs(this.#analyserBuffer[i] - 128); + } + } + + // Calculate RMS volume + let sum = 0; + for (let i = 0; i < soundBuffer.length; i++) { + const sample = soundBuffer[i]; + sum += sample * sample; + } + const volume = Math.sqrt(sum) / soundBuffer.length; + + // Smooth the volume with exponential moving average + this.#volumeSmoothed = this.#volumeSmoothed * 0.7 + volume * 0.3; + + // Store the smoothed volume (already in the right 
range from the buffer values) + this.volume = this.#volumeSmoothed; } close() { diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts index f4220e9a..bae4b44b 100644 --- a/app/src/room/broadcast.ts +++ b/app/src/room/broadcast.ts @@ -195,6 +195,7 @@ export class Broadcast { // TODO Also make scale a signal tick() { this.video.tick(); + this.audio.tick(); const bounds = this.bounds.peek(); const viewport = this.canvas.viewport.peek(); diff --git a/app/src/room/gl/border.ts b/app/src/room/gl/border.ts new file mode 100644 index 00000000..40928b22 --- /dev/null +++ b/app/src/room/gl/border.ts @@ -0,0 +1,143 @@ +import type { Broadcast } from "../broadcast"; +import { Canvas } from "../canvas"; +import type { Camera } from "./camera"; +import { Attribute, Shader, Uniform1f, Uniform2f, Uniform4f, UniformMatrix4fv } from "./shader"; +import borderFragSource from "./shaders/border.frag?raw"; +import borderVertSource from "./shaders/border.vert?raw"; + +export class BorderRenderer { + #canvas: Canvas; + #program: Shader; + #vao: WebGLVertexArrayObject; + #positionBuffer: WebGLBuffer; + #indexBuffer: WebGLBuffer; + + // Typed uniforms + #u_projection: UniformMatrix4fv; + #u_bounds: Uniform4f; + #u_depth: Uniform1f; + #u_radius: Uniform1f; + #u_size: Uniform2f; + #u_opacity: Uniform1f; + #u_border: Uniform1f; + + // Typed attributes + #a_position: Attribute; + + constructor(canvas: Canvas) { + this.#canvas = canvas; + this.#program = new Shader(canvas.gl, borderVertSource, borderFragSource); + + // Initialize typed uniforms + this.#u_projection = this.#program.createUniformMatrix4fv("u_projection"); + this.#u_bounds = this.#program.createUniform4f("u_bounds"); + this.#u_depth = this.#program.createUniform1f("u_depth"); + this.#u_radius = this.#program.createUniform1f("u_radius"); + this.#u_size = this.#program.createUniform2f("u_size"); + this.#u_opacity = this.#program.createUniform1f("u_opacity"); + this.#u_border = this.#program.createUniform1f("u_border"); + + // Initialize typed attributes + this.#a_position = this.#program.createAttribute("a_position"); + + const vao = this.#canvas.gl.createVertexArray(); + if (!vao) throw new Error("Failed to create VAO"); + this.#vao = vao; + + const positionBuffer = this.#canvas.gl.createBuffer(); + if (!positionBuffer) throw new Error("Failed to create position buffer"); + this.#positionBuffer = positionBuffer; + + const indexBuffer = this.#canvas.gl.createBuffer(); + if (!indexBuffer) throw new Error("Failed to create index buffer"); + this.#indexBuffer = indexBuffer; + + this.#setupBuffers(); + } + + #setupBuffers() { + const gl = this.#canvas.gl; + + // Quad vertices (0-1 range, will be scaled by bounds) + const positions = new Float32Array([ + 0, + 0, // Top-left + 1, + 0, // Top-right + 1, + 1, // Bottom-right + 0, + 1, // Bottom-left + ]); + + // Indices for two triangles + const indices = new Uint16Array([0, 1, 2, 0, 2, 3]); + + gl.bindVertexArray(this.#vao); + + // Position attribute + gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); + gl.enableVertexAttribArray(this.#a_position.location); + gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0); + + // Index buffer + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer); + gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW); + + gl.bindVertexArray(null); + } + + render(broadcast: Broadcast, camera: Camera, maxZ: number) { + const gl = this.#canvas.gl; + const bounds = 
broadcast.bounds.peek(); + const scale = broadcast.zoom.peek(); + + this.#program.use(); + + // Set projection matrix + this.#u_projection.set(camera.projection); + + // Border size (PADDING from old implementation) + const border = 12 * scale; + + // Bounds need to include the border expansion + this.#u_bounds.set( + bounds.position.x - border, + bounds.position.y - border, + bounds.size.x + border * 2, + bounds.size.y + border * 2, + ); + + // Set depth - borders should be behind everything + const baseDepth = camera.zToDepth(broadcast.position.peek().z, maxZ); + const depth = baseDepth - 0.04; // Further behind than audio viz + this.#u_depth.set(depth); + + // Set radius for rounded corners + this.#u_radius.set(border); + + // Set size for SDF calculation + this.#u_size.set(bounds.size.x + border * 2, bounds.size.y + border * 2); + + // Set opacity + const opacity = broadcast.video.online; + this.#u_opacity.set(opacity); + + // Set border size + this.#u_border.set(border); + + // Draw + gl.bindVertexArray(this.#vao); + gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0); + gl.bindVertexArray(null); + } + + cleanup() { + const gl = this.#canvas.gl; + gl.deleteVertexArray(this.#vao); + gl.deleteBuffer(this.#positionBuffer); + gl.deleteBuffer(this.#indexBuffer); + this.#program.cleanup(); + } +} diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 7b81230e..8ef515a2 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -196,7 +196,13 @@ export class BroadcastRenderer { // Bind meme texture if available const meme = broadcast.meme.peek(); const memeTexture = broadcast.video.memeTexture; - if (meme instanceof HTMLVideoElement && memeTexture && meme.readyState >= meme.HAVE_CURRENT_DATA) { + const memeBounds = broadcast.video.memeBounds; + if ( + meme instanceof HTMLVideoElement && + memeTexture && + meme.readyState >= meme.HAVE_CURRENT_DATA && + memeBounds + ) { gl.activeTexture(gl.TEXTURE2); gl.bindTexture(gl.TEXTURE_2D, memeTexture); this.#u_memeTexture.set(2); @@ -204,7 +210,6 @@ export class BroadcastRenderer { this.#u_memeOpacity.set(broadcast.video.memeOpacity); // Use pre-computed meme bounds from Video class - const memeBounds = broadcast.video.memeBounds; this.#u_memeBounds.set(memeBounds.x, memeBounds.y, memeBounds.width, memeBounds.height); } else { this.#u_hasMeme.set(0); diff --git a/app/src/room/gl/camera.ts b/app/src/room/gl/camera.ts index aaadbca7..e266cce3 100644 --- a/app/src/room/gl/camera.ts +++ b/app/src/room/gl/camera.ts @@ -39,11 +39,12 @@ export class Camera { } // Convert z-index to depth value - // Higher z-index = closer to camera (lower depth value) + // Higher z-index = closer to camera (lower depth value for LEQUAL test) zToDepth(z: number, maxZ: number): number { // Normalize z-index to 0-1 range, then map to depth range // We use a small range to keep everything mostly 2D + // Invert so higher z = more negative (closer to camera) const normalized = maxZ > 0 ? 
z / maxZ : 0; - return -normalized * 0.01; // Small depth range for subtle effect + return -(1.0 - normalized) * 0.01; // Higher z = closer to 0 (front) } } diff --git a/app/src/room/gl/outline.ts b/app/src/room/gl/outline.ts new file mode 100644 index 00000000..ab406f99 --- /dev/null +++ b/app/src/room/gl/outline.ts @@ -0,0 +1,194 @@ +import type { Broadcast } from "../broadcast"; +import { Canvas } from "../canvas"; +import type { Camera } from "./camera"; +import { Attribute, Shader, Uniform1f, Uniform2f, Uniform3f, Uniform4f, UniformMatrix4fv } from "./shader"; +import outlineFragSource from "./shaders/outline.frag?raw"; +import outlineVertSource from "./shaders/outline.vert?raw"; + +export class OutlineRenderer { + #canvas: Canvas; + #program: Shader; + #vao: WebGLVertexArrayObject; + #positionBuffer: WebGLBuffer; + #indexBuffer: WebGLBuffer; + + // Typed uniforms + #u_projection: UniformMatrix4fv; + #u_bounds: Uniform4f; + #u_depth: Uniform1f; + #u_radius: Uniform1f; + #u_size: Uniform2f; + #u_opacity: Uniform1f; + #u_volume: Uniform1f; + #u_border: Uniform1f; + #u_color: Uniform3f; + #u_time: Uniform1f; + + // Typed attributes + #a_position: Attribute; + + #startTime: number; + + constructor(canvas: Canvas) { + this.#canvas = canvas; + this.#program = new Shader(canvas.gl, outlineVertSource, outlineFragSource); + + // Initialize typed uniforms + this.#u_projection = this.#program.createUniformMatrix4fv("u_projection"); + this.#u_bounds = this.#program.createUniform4f("u_bounds"); + this.#u_depth = this.#program.createUniform1f("u_depth"); + this.#u_radius = this.#program.createUniform1f("u_radius"); + this.#u_size = this.#program.createUniform2f("u_size"); + this.#u_opacity = this.#program.createUniform1f("u_opacity"); + this.#u_volume = this.#program.createUniform1f("u_volume"); + this.#u_border = this.#program.createUniform1f("u_border"); + this.#u_color = this.#program.createUniform3f("u_color"); + this.#u_time = this.#program.createUniform1f("u_time"); + + // Initialize typed attributes + this.#a_position = this.#program.createAttribute("a_position"); + + const vao = this.#canvas.gl.createVertexArray(); + if (!vao) throw new Error("Failed to create VAO"); + this.#vao = vao; + + const positionBuffer = this.#canvas.gl.createBuffer(); + if (!positionBuffer) throw new Error("Failed to create position buffer"); + this.#positionBuffer = positionBuffer; + + const indexBuffer = this.#canvas.gl.createBuffer(); + if (!indexBuffer) throw new Error("Failed to create index buffer"); + this.#indexBuffer = indexBuffer; + + this.#startTime = performance.now(); + this.#setupBuffers(); + } + + #setupBuffers() { + const gl = this.#canvas.gl; + + // Quad vertices (0-1 range, will be scaled by bounds) + const positions = new Float32Array([ + 0, + 0, // Top-left + 1, + 0, // Top-right + 1, + 1, // Bottom-right + 0, + 1, // Bottom-left + ]); + + // Indices for two triangles + const indices = new Uint16Array([0, 1, 2, 0, 2, 3]); + + gl.bindVertexArray(this.#vao); + + // Position attribute + gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); + gl.enableVertexAttribArray(this.#a_position.location); + gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0); + + // Index buffer + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer); + gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW); + + gl.bindVertexArray(null); + } + + render(broadcast: Broadcast, camera: Camera, maxZ: number, now: DOMHighResTimeStamp) 
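+	// Depth layering across the GL passes (annotation; offsets taken from this
+	// file and border.ts): with the convention that more negative depth sits
+	// further back, each broadcast stacks as border (base - 0.04), then outline
+	// (base - 0.02), then video (base), so outlines never cover a video quad.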
{ + const gl = this.#canvas.gl; + const bounds = broadcast.bounds.peek(); + const scale = broadcast.zoom.peek(); + const volume = broadcast.audio.volume; + + this.#program.use(); + + // Set projection matrix + this.#u_projection.set(camera.projection); + + // Border size (PADDING from old implementation) + const border = 12 * scale; + + // Expand bounds to accommodate ripple and line width + // Line can extend: lineInset(2) + lineWidth(3) + aaWidth(2) + ripple beyond border + const maxExpansion = border * 1.5; + + // Bounds need to include the border expansion plus ripple space + this.#u_bounds.set( + bounds.position.x - maxExpansion, + bounds.position.y - maxExpansion, + bounds.size.x + maxExpansion * 2, + bounds.size.y + maxExpansion * 2, + ); + + // Set depth - outline should be behind ALL videos + // Videos are in range -0.01 to 0 (based on z-index) + // Add a tiny offset to make outlines slightly further + const baseDepth = camera.zToDepth(broadcast.position.peek().z, maxZ); + const depth = baseDepth - 0.02; // More negative = further away, behind all videos + this.#u_depth.set(depth); + + // Set radius for rounded corners + this.#u_radius.set(border); + + // Set size for SDF calculation - this is the total quad size (video + 2*maxExpansion) + // The shader will calculate videoSize by subtracting 2*border from this + this.#u_size.set(bounds.size.x + maxExpansion * 2, bounds.size.y + maxExpansion * 2); + + // Apply opacity based on volume and video online status + const opacity = broadcast.video.online ? Math.min(10 * volume, 1) : 0; + this.#u_opacity.set(opacity); + + // Set volume (smoothed, from 0-1) + this.#u_volume.set(volume); + + // Set border size + this.#u_border.set(border); + + // Set time for animation + const time = (now - this.#startTime) / 1000; + this.#u_time.set(time); + + // Set color based on volume using HSL from old implementation + // hue = 180 + volume * 120 + const hue = 180 + volume * 120; + + // Convert HSL to RGB + const h = hue / 360; + const s = 0.8; + const l = 0.45; + + const hueToRgb = (p: number, q: number, t: number) => { + if (t < 0) t += 1; + if (t > 1) t -= 1; + if (t < 1 / 6) return p + (q - p) * 6 * t; + if (t < 1 / 2) return q; + if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6; + return p; + }; + + // Convert HSL to RGB + const q = l < 0.5 ? 
l * (1 + s) : l + s - l * s; + const p = 2 * l - q; + const r = hueToRgb(p, q, h + 1 / 3); + const g = hueToRgb(p, q, h); + const b = hueToRgb(p, q, h - 1 / 3); + + this.#u_color.set(r, g, b); + + // Draw + gl.bindVertexArray(this.#vao); + gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0); + gl.bindVertexArray(null); + } + + cleanup() { + const gl = this.#canvas.gl; + gl.deleteVertexArray(this.#vao); + gl.deleteBuffer(this.#positionBuffer); + gl.deleteBuffer(this.#indexBuffer); + this.#program.cleanup(); + } +} diff --git a/app/src/room/gl/shaders/border.frag b/app/src/room/gl/shaders/border.frag new file mode 100644 index 00000000..2eb64433 --- /dev/null +++ b/app/src/room/gl/shaders/border.frag @@ -0,0 +1,50 @@ +#version 300 es +precision highp float; + +in vec2 v_pos; + +uniform float u_radius; +uniform vec2 u_size; +uniform float u_opacity; +uniform float u_border; // Border size in pixels + +out vec4 fragColor; + +// Signed distance function for rounded rectangle +float roundedBoxSDF(vec2 center, vec2 size, float radius) { + vec2 q = abs(center) - size + radius; + return min(max(q.x, q.y), 0.0) + length(max(q, 0.0)) - radius; +} + +void main() { + // v_pos is 0-1 in the quad + // u_size is the total bounds size (video + border on each side) + // u_border is the border width + + // Calculate position from center of the bounds + vec2 center = (v_pos - 0.5) * u_size; + + // The video occupies the center: u_size - 2*border + vec2 videoSize = u_size - vec2(u_border * 2.0); + + // Outer edge of the entire thing (edge of black border) + float outerDist = roundedBoxSDF(center, u_size * 0.5, u_radius); + + // Inner edge at video boundary + float videoDist = roundedBoxSDF(center, videoSize * 0.5, u_radius); + + // Discard anything outside the outer bounds + if (outerDist > 0.0) { + discard; + } + + // Black border fills everything except outside the outer bounds + vec3 color = vec3(0.0); + float alpha = 1.0; + + // Simple antialiasing + float edge = min(abs(videoDist), abs(outerDist)); + float aa = smoothstep(0.0, 1.0, edge); + + fragColor = vec4(color, alpha * aa * u_opacity); +} diff --git a/app/src/room/gl/shaders/border.vert b/app/src/room/gl/shaders/border.vert new file mode 100644 index 00000000..0286f10e --- /dev/null +++ b/app/src/room/gl/shaders/border.vert @@ -0,0 +1,19 @@ +#version 300 es + +in vec2 a_position; + +uniform mat4 u_projection; +uniform vec4 u_bounds; // x, y, width, height +uniform float u_depth; + +out vec2 v_pos; // Position within the quad (0-1) + +void main() { + // Scale and translate to bounds + vec2 pos = a_position * u_bounds.zw + u_bounds.xy; + + // Apply projection + gl_Position = u_projection * vec4(pos, u_depth, 1.0); + + v_pos = a_position; +} diff --git a/app/src/room/gl/shaders/outline.frag b/app/src/room/gl/shaders/outline.frag new file mode 100644 index 00000000..78b679f8 --- /dev/null +++ b/app/src/room/gl/shaders/outline.frag @@ -0,0 +1,102 @@ +#version 300 es +precision highp float; + +in vec2 v_pos; + +uniform float u_radius; +uniform vec2 u_size; +uniform float u_opacity; +uniform float u_volume; // Audio volume 0-1 (smoothed) +uniform float u_border; // Border size in pixels +uniform vec3 u_color; // RGB color for the volume indicator +uniform float u_time; // Time in seconds for animation + +out vec4 fragColor; + +// Signed distance function for rounded rectangle +float roundedBoxSDF(vec2 center, vec2 size, float radius) { + vec2 q = abs(center) - size + radius; + return min(max(q.x, q.y), 0.0) + length(max(q, 0.0)) - radius; +} + +void 
main() { + if (u_opacity <= 0.01) { + discard; + } + + // v_pos is 0-1 in the quad + // u_size is the total quad size (video + maxExpansion on each side) + // u_border is the border width (black outline size) + + // Calculate position from center of the bounds + vec2 center = (v_pos - 0.5) * u_size; + + // The render bounds are expanded by 1.5x border, but we need to find the actual video size + // maxExpansion = border * 1.5, so video size = u_size - 2*maxExpansion = u_size - 3.0*border + vec2 videoSize = u_size - vec2(u_border * 3.0); + + // Inner edge at video boundary + float videoDist = roundedBoxSDF(center, videoSize * 0.5, u_radius); + + // Discard if 2px within video area, creating a border. + if (videoDist <= 2.0) { + discard; + } + + // Calculate angle around the perimeter for ripple effect + float angle = atan(center.y, center.x); + + // Ripple effect using triangle wave (linear/jagged) + float rippleFreq = 8.0; // Number of ripples around the perimeter + float rippleSpeed = 1.5; // Slower animation + float rippleAmount = u_volume * u_border * 0.1; // Ripple intensity (10% of border - more subtle) + + // Create ripple offset using triangle wave (sawtooth converted to triangle) + // This creates a linear back-and-forth motion instead of smooth sine + float phase = angle * rippleFreq + u_time * rippleSpeed; + float sawtooth = fract(phase / (2.0 * 3.14159265)); + float triangle = abs(sawtooth * 2.0 - 1.0) * 2.0 - 1.0; + float ripple = triangle * rippleAmount; + + // Base expansion from volume (0 to border) + float baseExpand = u_border * min(1.0, u_volume); + + // Apply ripple to the expansion (can go beyond border slightly) + float totalExpand = baseExpand + ripple; + + // Distance to the edge of the colored region + float colorDist = roundedBoxSDF(center, videoSize * 0.5 + totalExpand, u_radius); + + // Line configuration + float lineInset = 5.0; // Push line inward to hide behind video frame edge + float lineWidth = 3.0; // Solid line width + float aaWidth = 2.0; // Anti-aliasing width on each side + float totalWidth = lineWidth + aaWidth; + + // Discard if well outside the line region + if (colorDist > totalWidth || videoDist < -lineInset) { + discard; + } + + // In the colored region + vec3 finalColor = u_color; + float finalAlpha = 0.3 + u_volume * 0.4; + + // Create a sharp line with AA on edges, inset from video boundary + float innerEdge = videoDist + lineInset; // Offset inward + float outerEdge = abs(colorDist); + + // Fade in from the inset edge over aaWidth + float innerAA = smoothstep(0.0, aaWidth, innerEdge); + + // Full opacity in the middle of the line + float lineMask = step(outerEdge, lineWidth); + + // Fade out at the outer edge over aaWidth + float outerAA = smoothstep(lineWidth + aaWidth, lineWidth, outerEdge); + + // Combine: AA at inner edge, full in middle, AA at outer edge + float aa = innerAA * mix(outerAA, 1.0, lineMask); + + fragColor = vec4(finalColor, finalAlpha * aa * u_opacity); +} diff --git a/app/src/room/gl/shaders/outline.vert b/app/src/room/gl/shaders/outline.vert new file mode 100644 index 00000000..0286f10e --- /dev/null +++ b/app/src/room/gl/shaders/outline.vert @@ -0,0 +1,19 @@ +#version 300 es + +in vec2 a_position; + +uniform mat4 u_projection; +uniform vec4 u_bounds; // x, y, width, height +uniform float u_depth; + +out vec2 v_pos; // Position within the quad (0-1) + +void main() { + // Scale and translate to bounds + vec2 pos = a_position * u_bounds.zw + u_bounds.xy; + + // Apply projection + gl_Position = u_projection * vec4(pos, 
u_depth, 1.0); + + v_pos = a_position; +} diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 75f86224..9a5d0835 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -3,7 +3,9 @@ import { Effect, Signal } from "@kixelated/signals"; import { Broadcast, BroadcastSource } from "./broadcast"; import type { Canvas } from "./canvas"; import { Vector } from "./geometry"; +import { BorderRenderer } from "./gl/border"; import { BroadcastRenderer } from "./gl/broadcast"; +import { OutlineRenderer } from "./gl/outline"; import type { Sound } from "./sound"; export type SpaceProps = { @@ -31,7 +33,9 @@ export class Space { #maxZ = 0; - // WebGL renderer + // WebGL renderers + #borderRenderer: BorderRenderer; + #outlineRenderer: OutlineRenderer; #broadcastRenderer: BroadcastRenderer; // Touch handling for mobile @@ -46,7 +50,9 @@ export class Space { this.sound = sound; this.profile = props?.profile ?? false; - // Initialize WebGL renderer + // Initialize WebGL renderers + this.#borderRenderer = new BorderRenderer(canvas); + this.#outlineRenderer = new OutlineRenderer(canvas); this.#broadcastRenderer = new BroadcastRenderer(canvas); // Use the new eventListener helper that automatically handles cleanup @@ -559,17 +565,26 @@ export class Space { const broadcasts = this.ordered.peek(); - // TODO: Render audio visualization backgrounds - // for (const broadcast of broadcasts) { - // // Audio background rendering - // } + // Render in order: black borders (back) -> audio viz (middle) -> videos (front) + // This way audio viz shows through overlapping black borders - // TODO: Render audio visualization - // for (const broadcast of broadcasts) { - // // Audio visualization rendering - // } + // 1. Render black borders (furthest back) + for (const broadcast of this.#rip) { + this.#borderRenderer.render(broadcast, this.canvas.camera, this.#maxZ); + } + for (const broadcast of broadcasts) { + this.#borderRenderer.render(broadcast, this.canvas.camera, this.#maxZ); + } + + // 2. Render audio visualizations (middle layer) + for (const broadcast of this.#rip) { + this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); + } + for (const broadcast of broadcasts) { + this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); + } - // Render broadcasts fading out + // 3. 
Render video content (front layer) for (const broadcast of this.#rip) { this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ); } @@ -641,6 +656,8 @@ export class Space { this.lookup.clear(); // Cleanup WebGL resources + this.#borderRenderer.cleanup(); + this.#outlineRenderer.cleanup(); this.#broadcastRenderer.cleanup(); } diff --git a/app/src/room/video.ts b/app/src/room/video.ts index a4174bef..249d9944 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -32,7 +32,7 @@ export class Video { #nameOpacity = 0; // Cached meme bounds (x_offset, y_offset, width_scale, height_scale) - memeBounds = { x: 0, y: 0, width: 1, height: 1 }; + memeBounds?: { x: number; y: number; width: number; height: number }; // WebGL textures for this broadcast webcamTexture: WebGLTexture; // Video texture @@ -151,6 +151,8 @@ export class Video { #runMemeBounds(effect: Effect) { const meme = effect.get(this.broadcast.meme); if (!meme || !(meme instanceof HTMLVideoElement)) { + // Clear memeBounds when no meme + this.memeBounds = undefined; return; } @@ -226,10 +228,12 @@ export class Video { } // Calculate offset in texture coordinates (0-1 range) - this.memeBounds.x = (1.0 - width) * xPos; - this.memeBounds.y = (1.0 - height) * yPos; - this.memeBounds.width = width; - this.memeBounds.height = height; + this.memeBounds = { + x: (1.0 - width) * xPos, + y: (1.0 - height) * yPos, + width: width, + height: height, + }; } #frameToTexture(src: VideoFrame, dst: WebGLTexture) { From 175248a81cd3ad76001aef368240149d48de3a1a Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 14:38:06 -0700 Subject: [PATCH 06/19] Better rendering. --- app/src/room/canvas.ts | 131 ++++++++++++---------------------- app/src/room/gl/background.ts | 4 +- app/src/room/gl/broadcast.ts | 2 +- app/src/room/gl/common.ts | 27 +++++++ app/src/room/gl/context.ts | 3 + app/src/room/gl/outline.ts | 8 +-- app/src/room/space.ts | 6 +- app/src/room/video.ts | 54 +++++++++----- 8 files changed, 117 insertions(+), 118 deletions(-) create mode 100644 app/src/room/gl/common.ts diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index 9ab0e1c0..b53737ba 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -32,6 +32,10 @@ export class Canvas { return this.#glContext.gl; } + get glContext(): GLContext { + return this.#glContext; + } + get camera() { return this.#camera; } @@ -48,68 +52,41 @@ export class Canvas { this.#camera = new Camera(); this.#backgroundRenderer = new BackgroundRenderer(this.#glContext); - const resize = () => { - // Check if we're in fullscreen or fixed position - const isFullscreen = document.fullscreenElement === this.#canvas; - const style = window.getComputedStyle(this.#canvas); - const isFixed = style.position === "fixed"; - - let newWidth: number; - let newHeight: number; - - if (isFullscreen || isFixed) { - // Use window dimensions for fullscreen or fixed position - newWidth = window.innerWidth; - newHeight = window.innerHeight; - } else { - // Use parent container dimensions - const parent = this.#canvas.parentElement; - if (!parent) return; - - const rect = parent.getBoundingClientRect(); - newWidth = rect.width; - newHeight = rect.height; - } + const resize = (entries: ResizeObserverEntry[]) => { + for (const entry of entries) { + // Get device pixel dimensions + const dpr = window.devicePixelRatio; + const width = entry.devicePixelContentBoxSize?.[0].inlineSize ?? 
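+				// (Editor's note) devicePixelContentBoxSize reports the entry in
+				// integer device pixels; the fallback below reconstructs the same
+				// value from CSS pixels * devicePixelRatio for browsers without it.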
+ entry.contentBoxSize[0].inlineSize * dpr; + const height = entry.devicePixelContentBoxSize?.[0].blockSize ?? + entry.contentBoxSize[0].blockSize * dpr; - newWidth *= window.devicePixelRatio; - newHeight *= window.devicePixelRatio; + const newWidth = Math.max(1, Math.floor(width)); + const newHeight = Math.max(1, Math.floor(height)); - // Only update canvas if dimensions actually changed - // This prevents the canvas from being cleared when layout changes don't affect size - if (this.#canvas.width === newWidth && this.#canvas.height === newHeight) { - return; - } + // Only update canvas if dimensions actually changed + if (this.#canvas.width === newWidth && this.#canvas.height === newHeight) { + return; + } - this.#canvas.width = newWidth; - this.#canvas.height = newHeight; + this.#canvas.width = newWidth; + this.#canvas.height = newHeight; - // Update WebGL viewport - this.#glContext.resize(newWidth, newHeight); + // Update WebGL viewport + this.#glContext.resize(newWidth, newHeight); - // The internal logic ignores devicePixelRatio because we automatically scale when rendering. - const viewport = Vector.create( - this.#canvas.width / window.devicePixelRatio, - this.#canvas.height / window.devicePixelRatio, - ); - this.viewport.set(viewport); + // The internal logic ignores devicePixelRatio because we automatically scale when rendering. + const viewport = Vector.create(newWidth / dpr, newHeight / dpr); + this.viewport.set(viewport); - // Update camera projection - this.#camera.updateOrtho(viewport); - }; + // Update camera projection + this.#camera.updateOrtho(viewport); - let resizeTimeout: ReturnType | undefined; - - const scheduleResize = () => { - // Clear any existing timeout - if (resizeTimeout) { - clearTimeout(resizeTimeout); + // Render immediately to avoid black flicker during resize + if (this.visible.peek()) { + this.#render(performance.now()); + } } - - // Debounce resize to prevent flickering during rapid changes - resizeTimeout = setTimeout(() => { - resize(); - resizeTimeout = undefined; - }, 50); }; const visible = () => { @@ -118,43 +95,20 @@ export class Canvas { visible(); - // Set up ResizeObserver for parent when canvas is added to DOM - let resizeObserver: ResizeObserver | null = null; - - const setupParentObserver = () => { - const parent = this.#canvas.parentElement; - if (parent && !resizeObserver) { - resizeObserver = new ResizeObserver(scheduleResize); - resizeObserver.observe(parent); - resize(); - } - }; - - // Try to set up observer immediately if already in DOM - setupParentObserver(); - - // Watch for canvas being added to DOM - const mutationObserver = new MutationObserver(() => { - if (this.#canvas.parentElement) { - setupParentObserver(); - mutationObserver.disconnect(); - } - }); - - if (!this.#canvas.parentElement) { - mutationObserver.observe(document.body, { childList: true, subtree: true }); + // Set up ResizeObserver for canvas + const resizeObserver = new ResizeObserver(resize); + try { + // Try to observe device-pixel-content-box for pixel-perfect sizing + resizeObserver.observe(this.#canvas, { box: "device-pixel-content-box" }); + } catch { + // Fallback to content-box if device-pixel-content-box is not supported + resizeObserver.observe(this.#canvas, { box: "content-box" }); } this.#signals.event(document, "visibilitychange", visible); this.#signals.cleanup(() => { - if (resizeObserver) { - resizeObserver.disconnect(); - } - mutationObserver.disconnect(); - if (resizeTimeout) { - clearTimeout(resizeTimeout); - } + resizeObserver.disconnect(); 
}); // Only render the canvas when it's visible. @@ -168,11 +122,14 @@ export class Canvas { } #render(now: DOMHighResTimeStamp) { + // Update common uniforms for this frame + this.#glContext.uniforms.update(now); + // Clear the screen this.#glContext.clear(); // Render background with shader - this.#backgroundRenderer.render(now); + this.#backgroundRenderer.render(); // TODO: Render demo text if enabled // if (this.demo.peek()) { diff --git a/app/src/room/gl/background.ts b/app/src/room/gl/background.ts index f4c6ada1..ed1f9470 100644 --- a/app/src/room/gl/background.ts +++ b/app/src/room/gl/background.ts @@ -52,13 +52,13 @@ export class BackgroundRenderer { gl.bindVertexArray(null); } - render(now: DOMHighResTimeStamp) { + render() { const gl = this.#glContext.gl; const viewport = this.#glContext.viewport.peek(); this.#program.use(); this.#u_resolution.set(viewport.x, viewport.y); - this.#u_time.set(now); + this.#u_time.set(this.#glContext.uniforms.time); gl.bindVertexArray(this.#vao); gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 8ef515a2..918cc9a7 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -210,7 +210,7 @@ export class BroadcastRenderer { this.#u_memeOpacity.set(broadcast.video.memeOpacity); // Use pre-computed meme bounds from Video class - this.#u_memeBounds.set(memeBounds.x, memeBounds.y, memeBounds.width, memeBounds.height); + this.#u_memeBounds.set(memeBounds.position.x, memeBounds.position.y, memeBounds.size.x, memeBounds.size.y); } else { this.#u_hasMeme.set(0); } diff --git a/app/src/room/gl/common.ts b/app/src/room/gl/common.ts new file mode 100644 index 00000000..f02d69e9 --- /dev/null +++ b/app/src/room/gl/common.ts @@ -0,0 +1,27 @@ +/** + * Manages common uniform values shared across multiple shaders. + * Computes values like time once per frame. + */ +export class CommonUniforms { + #startTime: number; + #currentTime: number = 0; + + constructor() { + this.#startTime = performance.now(); + } + + /** + * Update computed values for the current frame. + * Call this once per frame before rendering. + */ + update(now: DOMHighResTimeStamp) { + this.#currentTime = (now - this.#startTime) / 1000; + } + + /** + * Get time value in seconds since creation. 
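+	 * (Editor's note) Renderers read this after update(now) has run for the
+	 * frame, e.g. u_time.set(glContext.uniforms.time), so every shader sees
+	 * the same timestamp within a single frame.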
+ */ + get time(): number { + return this.#currentTime; + } +} diff --git a/app/src/room/gl/context.ts b/app/src/room/gl/context.ts index ff5e3ee5..7b765484 100644 --- a/app/src/room/gl/context.ts +++ b/app/src/room/gl/context.ts @@ -1,10 +1,12 @@ import { Signal } from "@kixelated/signals"; import { Vector } from "../geometry"; +import { CommonUniforms } from "./common"; export class GLContext { gl: WebGL2RenderingContext; canvas: HTMLCanvasElement; viewport: Signal; + uniforms: CommonUniforms; constructor(canvas: HTMLCanvasElement, viewport: Signal) { const gl = canvas.getContext("webgl2", { @@ -21,6 +23,7 @@ export class GLContext { this.gl = gl; this.canvas = canvas; this.viewport = viewport; + this.uniforms = new CommonUniforms(); // Enable depth testing for z-index ordering gl.enable(gl.DEPTH_TEST); diff --git a/app/src/room/gl/outline.ts b/app/src/room/gl/outline.ts index ab406f99..e78c49c6 100644 --- a/app/src/room/gl/outline.ts +++ b/app/src/room/gl/outline.ts @@ -27,8 +27,6 @@ export class OutlineRenderer { // Typed attributes #a_position: Attribute; - #startTime: number; - constructor(canvas: Canvas) { this.#canvas = canvas; this.#program = new Shader(canvas.gl, outlineVertSource, outlineFragSource); @@ -60,7 +58,6 @@ export class OutlineRenderer { if (!indexBuffer) throw new Error("Failed to create index buffer"); this.#indexBuffer = indexBuffer; - this.#startTime = performance.now(); this.#setupBuffers(); } @@ -97,7 +94,7 @@ export class OutlineRenderer { gl.bindVertexArray(null); } - render(broadcast: Broadcast, camera: Camera, maxZ: number, now: DOMHighResTimeStamp) { + render(broadcast: Broadcast, camera: Camera, maxZ: number) { const gl = this.#canvas.gl; const bounds = broadcast.bounds.peek(); const scale = broadcast.zoom.peek(); @@ -148,8 +145,7 @@ export class OutlineRenderer { this.#u_border.set(border); // Set time for animation - const time = (now - this.#startTime) / 1000; - this.#u_time.set(time); + this.#u_time.set(this.#canvas.glContext.uniforms.time); // Set color based on volume using HSL from old implementation // hue = 180 + volume * 120 diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 9a5d0835..66f94af2 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -557,7 +557,7 @@ export class Space { } // Render using WebGL - #render(now: DOMHighResTimeStamp) { + #render() { // TODO: Render the audio click prompt if audio is suspended // if (this.sound.suspended.peek() && !this.profile) { // this.#renderAudioPrompt(); @@ -578,10 +578,10 @@ export class Space { // 2. Render audio visualizations (middle layer) for (const broadcast of this.#rip) { - this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); + this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ); } for (const broadcast of broadcasts) { - this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); + this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ); } // 3. 
Render video content (front layer) diff --git a/app/src/room/video.ts b/app/src/room/video.ts index 249d9944..1ea06f9a 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -1,10 +1,9 @@ -import { Publish, Watch } from "@kixelated/hang"; import { Effect, Signal } from "@kixelated/signals"; import * as Api from "../api"; import type { Broadcast } from "./broadcast"; import { FakeBroadcast } from "./fake"; -import { Vector } from "./geometry"; -import { MEME_AUDIO, MEME_AUDIO_LOOKUP, MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "./meme"; +import { Bounds, Vector } from "./geometry"; +import { MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "./meme"; //export type VideoSource = Watch.Video.Source | Publish.Video.Encoder; @@ -31,8 +30,11 @@ export class Video { #memeOpacity = 0; #nameOpacity = 0; + // Signal that updates when meme video dimensions are loaded + #memeSize = new Signal(undefined); + // Cached meme bounds (x_offset, y_offset, width_scale, height_scale) - memeBounds?: { x: number; y: number; width: number; height: number }; + memeBounds?: Bounds; // WebGL textures for this broadcast webcamTexture: WebGLTexture; // Video texture @@ -146,22 +148,34 @@ export class Video { if (!(meme instanceof HTMLVideoElement)) return; this.#videoToTexture(effect, meme, this.memeTexture); + + // Listen for loadedmetadata event to update meme size when dimensions are available + const updateSize = () => { + if (meme.videoWidth > 0 && meme.videoHeight > 0) { + effect.set(this.#memeSize, Vector.create(meme.videoWidth, meme.videoHeight)); + } + }; + + // Check if already loaded + if (meme.readyState >= 1) { + updateSize(); + } + + // Listen for metadata load + effect.event(meme, "loadedmetadata", updateSize); } #runMemeBounds(effect: Effect) { const meme = effect.get(this.broadcast.meme); - if (!meme || !(meme instanceof HTMLVideoElement)) { - // Clear memeBounds when no meme - this.memeBounds = undefined; - return; - } + if (!meme || !(meme instanceof HTMLVideoElement)) return; + + // Wait until meme dimensions are available + const memeSize = effect.get(this.#memeSize); + if (!memeSize) return; // Also react to bounds changes const bounds = effect.get(this.broadcast.bounds); - // Wait until video metadata is loaded - if (meme.videoWidth === 0 || meme.videoHeight === 0) return; - // Get meme configuration const memeName = effect.get(this.broadcast.memeName); let fit: "contain" | "cover" = "cover"; @@ -178,7 +192,7 @@ export class Video { } // Calculate meme bounds based on fit and position - const aspectRatio = meme.videoWidth / meme.videoHeight; + const aspectRatio = memeSize.x / memeSize.y; const boundsAspectRatio = bounds.size.x / bounds.size.y; let width: number; let height: number; @@ -228,12 +242,14 @@ export class Video { } // Calculate offset in texture coordinates (0-1 range) - this.memeBounds = { - x: (1.0 - width) * xPos, - y: (1.0 - height) * yPos, - width: width, - height: height, - }; + this.memeBounds = new Bounds( + Vector.create((1.0 - width) * xPos, (1.0 - height) * yPos), + Vector.create(width, height), + ); + + effect.cleanup(() => { + this.memeBounds = undefined; + }); } #frameToTexture(src: VideoFrame, dst: WebGLTexture) { From a4c1918ed7b2a3d592b23240c8e00906bc83a5b2 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 14:39:55 -0700 Subject: [PATCH 07/19] Scale better. 
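(Editor's note) The ratios below are not arbitrary: assuming the border works
out to 12px at 1x zoom (the old canvas code used 12 * zoom for its padding),
they reproduce the fixed pixel values they replace. A quick arithmetic check:

```glsl
// Editorial sanity check, assuming u_border = 12.0 at 1x zoom:
float lineInset = 12.0 * 0.42; // ~= 5.04, was the fixed 5.0
float lineWidth = 12.0 * 0.25; //  = 3.00, was the fixed 3.0
float aaWidth   = 12.0 * 0.17; // ~= 2.04, was the fixed 2.0
```

Tying these to u_border keeps the outline proportions stable as zoom and the
canvas pixel ratio change, instead of staying a constant number of
framebuffer pixels.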
--- app/src/room/gl/shaders/outline.frag | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/app/src/room/gl/shaders/outline.frag b/app/src/room/gl/shaders/outline.frag index 78b679f8..007dc661 100644 --- a/app/src/room/gl/shaders/outline.frag +++ b/app/src/room/gl/shaders/outline.frag @@ -67,10 +67,10 @@ void main() { // Distance to the edge of the colored region float colorDist = roundedBoxSDF(center, videoSize * 0.5 + totalExpand, u_radius); - // Line configuration - float lineInset = 5.0; // Push line inward to hide behind video frame edge - float lineWidth = 3.0; // Solid line width - float aaWidth = 2.0; // Anti-aliasing width on each side + // Line configuration (as percentage of border) + float lineInset = u_border * 0.42; // Push line inward to hide behind video frame edge + float lineWidth = u_border * 0.25; // Solid line width + float aaWidth = u_border * 0.17; // Anti-aliasing width on each side float totalWidth = lineWidth + aaWidth; // Discard if well outside the line region From 744b2f79ceb1713e517dc7064fa537e82197015d Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 15:01:07 -0700 Subject: [PATCH 08/19] Add a setting for pixel ratio. --- app/src/room/canvas.ts | 30 ++++++++++++--------- app/src/settings.tsx | 61 +++++++++++++++++++++++++++++++++++++++--- 2 files changed, 74 insertions(+), 17 deletions(-) diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index b53737ba..0e81b068 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -1,4 +1,5 @@ import { Effect, Signal } from "@kixelated/signals"; +import Settings from "../settings"; import { Vector } from "./geometry"; import { BackgroundRenderer } from "./gl/background"; import { Camera } from "./gl/camera"; @@ -54,12 +55,13 @@ export class Canvas { const resize = (entries: ResizeObserverEntry[]) => { for (const entry of entries) { - // Get device pixel dimensions - const dpr = window.devicePixelRatio; - const width = entry.devicePixelContentBoxSize?.[0].inlineSize ?? - entry.contentBoxSize[0].inlineSize * dpr; - const height = entry.devicePixelContentBoxSize?.[0].blockSize ?? 
- entry.contentBoxSize[0].blockSize * dpr; + // Get device pixel dimensions using the user's configured ratio + const dpr = Settings.rendering.devicePixelRatio.peek(); + + // Always use contentBoxSize and multiply by our custom ratio + // to ensure we respect the user's setting + const width = entry.contentBoxSize[0].inlineSize * dpr; + const height = entry.contentBoxSize[0].blockSize * dpr; const newWidth = Math.max(1, Math.floor(width)); const newHeight = Math.max(1, Math.floor(height)); @@ -96,17 +98,19 @@ export class Canvas { visible(); // Set up ResizeObserver for canvas + // Use content-box so we can apply our custom devicePixelRatio setting const resizeObserver = new ResizeObserver(resize); - try { - // Try to observe device-pixel-content-box for pixel-perfect sizing - resizeObserver.observe(this.#canvas, { box: "device-pixel-content-box" }); - } catch { - // Fallback to content-box if device-pixel-content-box is not supported - resizeObserver.observe(this.#canvas, { box: "content-box" }); - } + resizeObserver.observe(this.#canvas, { box: "content-box" }); this.#signals.event(document, "visibilitychange", visible); + // Trigger resize when devicePixelRatio setting changes + this.#signals.subscribe(Settings.rendering.devicePixelRatio, () => { + // Force a resize by temporarily disconnecting and reconnecting + resizeObserver.disconnect(); + resizeObserver.observe(this.#canvas, { box: "content-box" }); + }); + this.#signals.cleanup(() => { resizeObserver.disconnect(); }); diff --git a/app/src/settings.tsx b/app/src/settings.tsx index 1d8b357c..ea04989a 100644 --- a/app/src/settings.tsx +++ b/app/src/settings.tsx @@ -72,6 +72,20 @@ export const Settings = { step: new Signal(Number.parseInt(localStorage.getItem("settings.tutorial.step") ?? "0", 10)), }, + // Rendering settings + rendering: { + devicePixelRatio: new Signal((() => { + const stored = localStorage.getItem("settings.rendering.devicePixelRatio"); + if (stored) { + const parsed = Number.parseFloat(stored); + if (!Number.isNaN(parsed) && parsed > 0 && parsed <= window.devicePixelRatio) { + return parsed; + } + } + return Math.max(1, window.devicePixelRatio / 2); + })()), + }, + clear: () => { localStorage.clear(); window.location.reload(); @@ -190,6 +204,10 @@ effect.subscribe(Settings.tutorial.step, (step) => { localStorage.setItem("settings.tutorial.step", step.toString()); }); +effect.subscribe(Settings.rendering.devicePixelRatio, (ratio) => { + localStorage.setItem("settings.rendering.devicePixelRatio", ratio.toString()); +}); + // Mostly just to avoid console warnings about signals not being closed document.addEventListener("unload", () => { effect.close(); @@ -201,6 +219,16 @@ export function Modal(props: { sound: Sound }): JSX.Element { const draggable = solid(Settings.draggable); const tts = createSelector(solid(Settings.audio.tts)); const webGPUSupported = supportsWebGPU(); + const devicePixelRatio = solid(Settings.rendering.devicePixelRatio); + const maxDevicePixelRatio = window.devicePixelRatio; + + // Calculate available pixel ratio options (0.5x, 1x, 2x, 4x, 8x) + const pixelRatioOptions: number[] = [0.5]; + for (let i = 1; i <= maxDevicePixelRatio; i *= 2) { + pixelRatioOptions.push(i); + } + + const isSelectedRatio = createSelector(() => devicePixelRatio()); const progress = solid(props.sound.tts.progress); const [isGenerating, setIsGenerating] = createSignal(false); @@ -227,7 +255,7 @@ export function Modal(props: { sound: Sound }): JSX.Element {
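(Editor's note) A worked example of the option list built above; the helper
name is hypothetical, and the only input is the browser's reported
window.devicePixelRatio:

```ts
// Options start at 0.5, then double from 1 while i <= maxDevicePixelRatio.
function pixelRatioOptionsFor(max: number): number[] {
	const out: number[] = [0.5];
	for (let i = 1; i <= max; i *= 2) {
		out.push(i);
	}
	return out;
}

pixelRatioOptionsFor(1); // [0.5, 1]       (standard display)
pixelRatioOptionsFor(1.5); // [0.5, 1]     (2 > 1.5, loop stops)
pixelRatioOptionsFor(2); // [0.5, 1, 2]    (Retina)
pixelRatioOptionsFor(3); // [0.5, 1, 2]    (4 > 3, loop stops)
```

So the 4x and 8x entries mentioned in the comment above only appear on
displays that actually report a devicePixelRatio of 4 or more.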
-
+
Announce Join/Leave {tts("none") && "No voice announcements"} @@ -261,7 +289,7 @@ export function Modal(props: { sound: Sound }): JSX.Element { )}
-
+
+ {/* Device Pixel Ratio */} +
+
+ +
+
+ Pixel Ratio + Decrease for better performance +
+
+ {pixelRatioOptions.map((ratio) => ( + + ))} +
+
-
+
Remote Control Allow others to drag/resize your camera
@@ -350,7 +403,7 @@ export function Modal(props: { sound: Sound }): JSX.Element { type="checkbox" checked={draggable()} onChange={() => Settings.draggable.update((p) => !p)} - class="cursor-pointer accent-blue-500 group-hover:accent-blue-400 transition-colors flex-grow" + class="cursor-pointer accent-blue-500 group-hover:accent-blue-400 transition-colors w-18" />
From 1c6cd0bbabf1fe3e4d216754bdb8de4f1973c8e6 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 15:14:02 -0700 Subject: [PATCH 09/19] More improvements. --- app/src/room/canvas.ts | 14 +++++++++----- app/src/room/gl/border.ts | 2 +- app/src/room/gl/broadcast.ts | 2 +- app/src/room/gl/outline.ts | 2 +- app/src/room/space.ts | 9 ++++----- moq | 2 +- 6 files changed, 17 insertions(+), 14 deletions(-) diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index 0e81b068..8065f64d 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -17,7 +17,6 @@ export class Canvas { // Use a callback to render after the background. onRender?: (now: DOMHighResTimeStamp) => void; - #animate?: number; visible: Signal; viewport: Signal; @@ -120,8 +119,15 @@ export class Canvas { const visible = effect.get(this.visible); if (!visible) return; - this.#animate = requestAnimationFrame(this.#render.bind(this)); - effect.cleanup(() => cancelAnimationFrame(this.#animate ?? 0)); + let cancel: number; + const render = (now: DOMHighResTimeStamp) => { + this.#render(now); + cancel = requestAnimationFrame(render); + }; + + cancel = requestAnimationFrame(render); + + effect.cleanup(() => cancelAnimationFrame(cancel)); }); } @@ -148,8 +154,6 @@ export class Canvas { console.error("render error", err); } } - - this.#animate = requestAnimationFrame(this.#render.bind(this)); } // TODO: Implement demo text rendering with WebGL diff --git a/app/src/room/gl/border.ts b/app/src/room/gl/border.ts index 40928b22..a517b214 100644 --- a/app/src/room/gl/border.ts +++ b/app/src/room/gl/border.ts @@ -133,7 +133,7 @@ export class BorderRenderer { gl.bindVertexArray(null); } - cleanup() { + close() { const gl = this.#canvas.gl; gl.deleteVertexArray(this.#vao); gl.deleteBuffer(this.#positionBuffer); diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 918cc9a7..14ca4f09 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -221,7 +221,7 @@ export class BroadcastRenderer { gl.bindVertexArray(null); } - cleanup() { + close() { const gl = this.#canvas.gl; gl.deleteVertexArray(this.#vao); gl.deleteBuffer(this.#positionBuffer); diff --git a/app/src/room/gl/outline.ts b/app/src/room/gl/outline.ts index e78c49c6..be42473a 100644 --- a/app/src/room/gl/outline.ts +++ b/app/src/room/gl/outline.ts @@ -180,7 +180,7 @@ export class OutlineRenderer { gl.bindVertexArray(null); } - cleanup() { + close() { const gl = this.#canvas.gl; gl.deleteVertexArray(this.#vao); gl.deleteBuffer(this.#positionBuffer); diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 66f94af2..bf614971 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -641,6 +641,10 @@ export class Space { } close() { + this.#borderRenderer.close(); + this.#outlineRenderer.close(); + this.#broadcastRenderer.close(); + this.#signals.close(); for (const broadcast of this.ordered.peek()) { @@ -654,11 +658,6 @@ export class Space { this.#rip = []; this.ordered.set([]); this.lookup.clear(); - - // Cleanup WebGL resources - this.#borderRenderer.cleanup(); - this.#outlineRenderer.cleanup(); - this.#broadcastRenderer.cleanup(); } // Publish the current position to the network. 
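(Editor's note) The canvas.ts change above replaces the stored #animate
handle with a loop owned by the visibility effect. A minimal sketch of the
pattern, with draw() standing in for the real #render:

```ts
// draw() is a stand-in for Canvas.#render; only the scheduling matters.
declare function draw(now: DOMHighResTimeStamp): void;

let handle: number;
const loop = (now: DOMHighResTimeStamp) => {
	draw(now); // render one frame
	handle = requestAnimationFrame(loop); // re-arm for the next frame
};
handle = requestAnimationFrame(loop);

// Cleanup cancels whichever frame is pending; because `handle` is
// re-captured every frame, the cancellation can never go stale.
const stop = () => cancelAnimationFrame(handle);
```

The render callback no longer re-schedules itself at the end of #render, so
scheduling and cancellation now live in one place: the effect.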
diff --git a/moq b/moq index c4ee0b6b..20beccf7 160000 --- a/moq +++ b/moq @@ -1 +1 @@ -Subproject commit c4ee0b6bdb83c4aea3f028b35f03ff51f5ca719f +Subproject commit 20beccf730629bf980b4b245a21c2ab56b00188d From b1e5c5834bbdbd1a20a92be40fc26ffda88f1204 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 15:42:30 -0700 Subject: [PATCH 10/19] Add locator. --- app/index.css | 15 ++++++ app/src/room/broadcast.ts | 8 --- app/src/room/index.ts | 52 ++++++++++++++++--- app/src/room/locator.ts | 105 ++++++++++++++++++++++++++++++++++++++ app/src/room/space.ts | 7 --- 5 files changed, 164 insertions(+), 23 deletions(-) create mode 100644 app/src/room/locator.ts diff --git a/app/index.css b/app/index.css index 2e0e1592..46a0d6ec 100644 --- a/app/index.css +++ b/app/index.css @@ -161,3 +161,18 @@ main { input[type="range"] { accent-color: hsl(var(--link-hue), 75%, 50%); } + +/* Locator throb animation */ +@keyframes throb { + 0%, + 100% { + transform: scale(1); + } + 50% { + transform: scale(1.1); + } +} + +.animate-throb { + animation: throb 2s ease-in-out infinite; +} diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts index bae4b44b..2b0c1463 100644 --- a/app/src/room/broadcast.ts +++ b/app/src/room/broadcast.ts @@ -67,9 +67,6 @@ export class Broadcast { scale: Signal; // room scale, 1 is 100% zoom = new Signal(1.0); // local zoom, 1 is 100% - // Show a locator arrow for 8 seconds to show our position on join. - #locatorStart?: DOMHighResTimeStamp; - signals = new Effect(); constructor(props: BroadcastProps) { @@ -282,11 +279,6 @@ export class Broadcast { return false; } - // TODO: Implement locator arrow with WebGL - // renderLocator(now: DOMHighResTimeStamp) { - // // Render "YOU" arrow above broadcast - // } - close() { this.signals.close(); this.audio.close(); diff --git a/app/src/room/index.ts b/app/src/room/index.ts index 7fc55066..d13673ae 100644 --- a/app/src/room/index.ts +++ b/app/src/room/index.ts @@ -1,9 +1,11 @@ import { Publish, Watch } from "@kixelated/hang"; import * as Moq from "@kixelated/moq"; -import { Effect } from "@kixelated/signals"; +import { Effect, Signal } from "@kixelated/signals"; import Settings from "../settings"; +import { Broadcast } from "./broadcast"; import type { Canvas } from "./canvas"; import { Local } from "./local"; +import { Locator } from "./locator"; import { Space } from "./space"; export interface RoomProps { @@ -24,6 +26,9 @@ export class Room { // The physics space for the room. 
 	space: Space;
 
+	#cameraBroadcast = new Signal<Broadcast | undefined>(undefined);
+	#shareBroadcast = new Signal<Broadcast | undefined>(undefined);
+
 	#signals = new Effect();
 
 	constructor(props: RoomProps) {
@@ -54,6 +59,30 @@ export class Room {
 		this.#signals.timer(() => {
 			this.space.sound?.tts.enabled.set(true);
 		}, 1000);
+
+		// Manage the locator for the camera broadcast
+		this.#signals.effect((effect) => {
+			const cameraBroadcast = effect.get(this.#cameraBroadcast);
+			if (!cameraBroadcast) return;
+
+			const locator = new Locator(cameraBroadcast);
+			effect.cleanup(() => locator.close());
+
+			// Auto-close after 8 seconds (7s visible + 1s fade transition)
+			effect.timer(() => locator.close(), 8000);
+		});
+
+		// Manage the locator for the share broadcast
+		this.#signals.effect((effect) => {
+			const shareBroadcast = effect.get(this.#shareBroadcast);
+			if (!shareBroadcast) return;
+
+			const locator = new Locator(shareBroadcast);
+			effect.cleanup(() => locator.close());
+
+			// Auto-close after 8 seconds (7s visible + 1s fade transition)
+			effect.timer(() => locator.close(), 8000);
+		});
 	}
 
 	async #run(announced: Moq.Announced) {
@@ -61,20 +90,27 @@
 			const update = await announced.next();
 			if (!update) break;
 
-			let local: Publish.Broadcast | undefined;
 			if (update.path === this.local.camera.path.peek()) {
-				local = this.local.camera;
-			} else if (update.path === this.local.share.path.peek()) {
-				local = this.local.share;
+				if (update.active) {
+					const broadcast = this.space.add(update.path, this.local.camera);
+					this.#cameraBroadcast.set(broadcast as Broadcast);
+				} else {
+					this.space.remove(update.path);
+					this.#cameraBroadcast.set(undefined);
+				}
+
+				continue;
 			}
 
-			if (local) {
+			if (update.path === this.local.share.path.peek()) {
 				if (update.active) {
-					this.space.add(update.path, local);
+					const broadcast = this.space.add(update.path, this.local.share);
+					this.#shareBroadcast.set(broadcast as Broadcast);
 				} else {
-					// NOTE: We don't close local sources so we can toggle them. 
this.space.remove(update.path); + this.#shareBroadcast.set(undefined); } + continue; } diff --git a/app/src/room/locator.ts b/app/src/room/locator.ts new file mode 100644 index 00000000..ff1209f9 --- /dev/null +++ b/app/src/room/locator.ts @@ -0,0 +1,105 @@ +import type { Publish } from "@kixelated/hang"; +import { Effect, Signal } from "@kixelated/signals"; +import * as DOM from "@kixelated/signals/dom"; +import type { Broadcast } from "./broadcast"; +import { Bounds, Vector } from "./geometry"; + +export class Locator { + broadcast: Broadcast; + signals = new Effect(); + + #visible = new Signal(true); + + constructor(broadcast: Broadcast) { + this.broadcast = broadcast; + + this.signals.effect(this.#render.bind(this)); + + // Start fading out after 7 seconds + this.signals.timer(() => { + this.#visible.set(false); + }, 7000); + } + + #render(effect: Effect) { + // Container for arrow and text + const root = DOM.create("div", { + className: "fixed pointer-events-none transition-opacity duration-1000 animate-throb", + }); + + // Arrow pointing down + const arrow = DOM.create("div", { + className: "absolute left-1/2 -translate-x-1/2", + }); + + // Triangle SVG for the arrow + const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); + svg.setAttribute("width", "32"); + svg.setAttribute("height", "32"); + svg.setAttribute("viewBox", "0 0 32 32"); + + const path = document.createElementNS("http://www.w3.org/2000/svg", "path"); + path.setAttribute("d", "M16 28 L8 16 L24 16 Z"); + path.setAttribute("fill", "#FFD700"); + path.setAttribute("stroke", "#000"); + path.setAttribute("stroke-width", "2"); + + svg.appendChild(path); + arrow.appendChild(svg); + root.appendChild(arrow); + + // "YOU" text above the arrow + const text = DOM.create("div", { + className: + "absolute left-1/2 -translate-x-1/2 -top-8 font-bold text-2xl text-[#FFD700] [text-shadow:_0_0_8px_rgb(0_0_0_/_80%),_2px_2px_4px_rgb(0_0_0)]", + textContent: "YOU", + }); + root.appendChild(text); + + // Update the position based on broadcast bounds + const updatePosition = (bounds: Bounds, viewport: Vector) => { + const canvasRect = this.broadcast.canvas.element.getBoundingClientRect(); + + // Scale bounds from canvas coordinates to page coordinates + const scaleX = canvasRect.width / viewport.x; + const scaleY = canvasRect.height / viewport.y; + + // Calculate position above the broadcast + const x = (bounds.position.x + bounds.size.x / 2) * scaleX + canvasRect.left; + const y = bounds.position.y * scaleY + canvasRect.top; + + // Position the locator, with some gap above the broadcast + const gap = 60; // Distance above the broadcast + const top = Math.max(canvasRect.top, y - gap); + const left = Math.min(Math.max(canvasRect.left, x), canvasRect.left + canvasRect.width); + + root.style.left = `${left}px`; + root.style.top = `${top}px`; + }; + + // Update position when bounds or viewport change + effect.effect((effect) => { + const bounds = effect.get(this.broadcast.bounds); + const viewport = effect.get(this.broadcast.canvas.viewport); + updatePosition(bounds, viewport); + }); + + // Set z-index based on broadcast z-index + effect.effect((effect) => { + const z = effect.get(this.broadcast.position).z; + root.style.zIndex = `${100 + z}`; + }); + + // Control opacity based on visible signal + effect.effect((effect) => { + const visible = effect.get(this.#visible); + root.style.opacity = visible ? 
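+			// (Editor's note) the duration-1000 class on the root element gives
+			// the fade a 1s transition; paired with the 7s timer above, this
+			// fills the 8s window before Room auto-closes the locator.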
"1" : "0"; + }); + + DOM.render(effect, document.body, root); + } + + close() { + this.signals.close(); + } +} diff --git a/app/src/room/space.ts b/app/src/room/space.ts index bf614971..f9b43adc 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -604,13 +604,6 @@ export class Space { dragging: true, }); } - - // TODO: Render the locator arrows for our broadcasts on join - // for (const broadcast of broadcasts) { - // if (broadcast.source instanceof Publish.Broadcast) { - // broadcast.renderLocator(now); - // } - // } } // TODO: Implement audio prompt with WebGL or DOM overlay From 37e01f103c078ec28db010d154f87d83d5fdeb4f Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Mon, 6 Oct 2025 15:52:39 -0700 Subject: [PATCH 11/19] Render the username with HTML/CSS. --- app/src/room/broadcast.ts | 4 + app/src/room/name.ts | 98 ++++++++++++++++ app/src/room/space.ts | 44 +++++-- app/src/room/video.ts | 240 -------------------------------------- 4 files changed, 139 insertions(+), 247 deletions(-) create mode 100644 app/src/room/name.ts diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts index 2b0c1463..5e4d28aa 100644 --- a/app/src/room/broadcast.ts +++ b/app/src/room/broadcast.ts @@ -6,6 +6,7 @@ import { Captions } from "./captions"; import { Chat } from "./chat"; import { FakeBroadcast } from "./fake"; import { Bounds, Vector } from "./geometry"; +import { Name } from "./name"; import { Sound } from "./sound"; import { Video } from "./video"; @@ -42,6 +43,7 @@ export class Broadcast { video: Video; chat: Chat; captions: Captions; + name: Name; // The current chat message, if any. message = new Signal(undefined); @@ -90,6 +92,7 @@ export class Broadcast { this.audio = new Audio(this, props.sound); this.chat = new Chat(this, props.canvas); this.captions = new Captions(this, props.canvas); + this.name = new Name(this, props.canvas); const viewport = this.canvas.viewport.peek(); @@ -285,6 +288,7 @@ export class Broadcast { this.video.close(); this.chat.close(); this.captions.close(); + this.name.close(); // NOTE: Don't close the source broadcast; we need it for the local preview. 
// this.source.close(); diff --git a/app/src/room/name.ts b/app/src/room/name.ts new file mode 100644 index 00000000..0c281d1f --- /dev/null +++ b/app/src/room/name.ts @@ -0,0 +1,98 @@ +import { Effect, Signal } from "@kixelated/signals"; +import * as DOM from "@kixelated/signals/dom"; +import type { Broadcast } from "./broadcast"; +import type { Canvas } from "./canvas"; +import { Bounds, Vector } from "./geometry"; + +export class Name { + canvas: Canvas; + broadcast: Broadcast; + + signals = new Effect(); + + #hovering = new Signal(false); + #profile = new Signal(false); + + constructor(broadcast: Broadcast, canvas: Canvas) { + this.broadcast = broadcast; + this.canvas = canvas; + + this.signals.effect(this.#render.bind(this)); + } + + setHovering(hovering: boolean) { + this.#hovering.set(hovering); + } + + setProfile(profile: boolean) { + this.#profile.set(profile); + } + + #render(effect: Effect) { + const root = DOM.create("div", { + className: + "fixed pointer-events-none transition-opacity duration-200 text-white font-bold [text-shadow:_-1px_-1px_0_#000,_1px_-1px_0_#000,_-1px_1px_0_#000,_1px_1px_0_#000] overflow-hidden text-ellipsis whitespace-nowrap select-none", + }); + + // Update the position of the name when the broadcast bounds or viewport changes + const updatePosition = (bounds: Bounds, viewport: Vector, zoom: number) => { + // Get the canvas element's position on the page + const canvasRect = this.canvas.element.getBoundingClientRect(); + + // Scale bounds from canvas coordinates to page coordinates + const scaleX = canvasRect.width / viewport.x; + const scaleY = canvasRect.height / viewport.y; + + // Transform bounds to page coordinates + const pageBounds = { + x: bounds.position.x * scaleX + canvasRect.left, + y: bounds.position.y * scaleY + canvasRect.top, + width: bounds.size.x * scaleX, + }; + + // Position name at top-left of broadcast with offset + const fontSize = 12; + const offset = 12; + const left = Math.round(pageBounds.x + offset); + const top = Math.round(pageBounds.y + offset); + + root.style.left = `${left}px`; + root.style.top = `${top}px`; + root.style.fontSize = `${fontSize}px`; + root.style.maxWidth = `${pageBounds.width - 2 * offset}px`; + }; + + // Update name text + effect.effect((effect) => { + const name = effect.get(this.broadcast.source.user.name); + root.textContent = name || ""; + }); + + // Update position when bounds, viewport, or zoom change + effect.effect((effect) => { + const bounds = effect.get(this.broadcast.bounds); + const viewport = effect.get(this.broadcast.canvas.viewport); + const zoom = effect.get(this.broadcast.zoom); + updatePosition(bounds, viewport, zoom); + }); + + // Update z-index based on broadcast position + effect.effect((effect) => { + const z = effect.get(this.broadcast.position).z; + root.style.zIndex = `${100 + z}`; + }); + + // Control opacity based on hovering or profile mode + effect.effect((effect) => { + const hovering = effect.get(this.#hovering); + const profile = effect.get(this.#profile); + root.style.opacity = hovering || profile ? 
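+			// (Editor's note) profile mode is set from Space.add() and pins the
+			// label visible; otherwise it fades in on hover via the duration-200
+			// transition class on the root element.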
"1" : "0"; + }); + + DOM.render(effect, document.body, root); + } + + close() { + this.signals.close(); + } +} diff --git a/app/src/room/space.ts b/app/src/room/space.ts index f9b43adc..90fcc78e 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -115,7 +115,10 @@ export class Space { this.#publishPosition(this.#dragging); this.#dragging = undefined; - this.#hovering = undefined; + if (this.#hovering) { + this.#hovering.name.setHovering(false); + this.#hovering = undefined; + } document.body.style.cursor = "default"; } } @@ -135,13 +138,22 @@ export class Space { const broadcast = this.#at(mouse); if (broadcast) { - this.#hovering = broadcast; + if (this.#hovering !== broadcast) { + if (this.#hovering) { + this.#hovering.name.setHovering(false); + } + this.#hovering = broadcast; + this.#hovering.name.setHovering(true); + } if (!broadcast.locked()) { document.body.style.cursor = "grab"; } } else { - this.#hovering = undefined; + if (this.#hovering) { + this.#hovering.name.setHovering(false); + this.#hovering = undefined; + } document.body.style.cursor = "default"; } } @@ -151,7 +163,10 @@ export class Space { this.#publishPosition(this.#dragging); this.#dragging = undefined; - this.#hovering = undefined; + if (this.#hovering) { + this.#hovering.name.setHovering(false); + this.#hovering = undefined; + } document.body.style.cursor = "default"; } } @@ -168,7 +183,13 @@ export class Space { return; } - this.#hovering = broadcast; + if (this.#hovering !== broadcast) { + if (this.#hovering) { + this.#hovering.name.setHovering(false); + } + this.#hovering = broadcast; + this.#hovering.name.setHovering(true); + } // Bump the z-index unless we're already at the top. broadcast.position.update((prev) => ({ @@ -343,7 +364,10 @@ export class Space { if (this.#touches.size === 0 && this.#dragging) { this.#publishPosition(this.#dragging); this.#dragging = undefined; - this.#hovering = undefined; + if (this.#hovering) { + this.#hovering.name.setHovering(false); + this.#hovering = undefined; + } this.#pinchStartDistance = 0; this.#pinchStartScale = 1; } @@ -379,7 +403,10 @@ export class Space { if (this.#dragging) { this.#publishPosition(this.#dragging); this.#dragging = undefined; - this.#hovering = undefined; + if (this.#hovering) { + this.#hovering.name.setHovering(false); + this.#hovering = undefined; + } this.#pinchStartDistance = 0; this.#pinchStartScale = 1; } @@ -411,6 +438,9 @@ export class Space { z: ++this.#maxZ, })); + // Set profile mode for the name display + broadcast.name.setProfile(this.profile); + if (this.lookup.has(id)) { throw new Error(`broadcast already exists: ${id}`); } diff --git a/app/src/room/video.ts b/app/src/room/video.ts index 1ea06f9a..d7f5605b 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -28,7 +28,6 @@ export class Video { online = 0; #memeOpacity = 0; - #nameOpacity = 0; // Signal that updates when meme video dimensions are loaded #memeSize = new Signal(undefined); @@ -328,243 +327,4 @@ export class Video { this.#gl.deleteTexture(this.avatarTexture); this.#gl.deleteTexture(this.memeTexture); } - - // TODO: Rendering is now handled by WebGL in space.ts - // This method is kept for reference but will be removed - /* - render( - _now: DOMHighResTimeStamp, - ctx: CanvasRenderingContext2D, - modifiers?: { - dragging?: boolean; - hovering?: boolean; - }, - ) { - ctx.save(); - - const bounds = this.broadcast.bounds.peek(); - const scale = this.broadcast.zoom.peek(); - - ctx.translate(bounds.position.x, bounds.position.y); - ctx.globalAlpha *= 
this.online; - ctx.fillStyle = "#000"; - - ctx.save(); - - // Add a drop shadow - ctx.shadowColor = "rgba(0, 0, 0, 1.0)"; - ctx.shadowBlur = 16 * scale; - ctx.shadowOffsetX = 0; - ctx.shadowOffsetY = 4 * scale; - - // Create a rounded rectangle path - const radius = 12 * scale; - const w = bounds.size.x; - const h = bounds.size.y; - - ctx.beginPath(); - ctx.moveTo(radius, 0); - ctx.lineTo(w - radius, 0); - ctx.quadraticCurveTo(w, 0, w, radius); - ctx.lineTo(w, h - radius); - ctx.quadraticCurveTo(w, h, w - radius, h); - ctx.lineTo(radius, h); - ctx.quadraticCurveTo(0, h, 0, h - radius); - ctx.lineTo(0, radius); - ctx.quadraticCurveTo(0, 0, radius, 0); - ctx.closePath(); - - ctx.fillStyle = "#000"; // just needed to apply the shadow - ctx.fill(); - - ctx.shadowColor = "transparent"; - - // Clip and draw the image - ctx.clip(); - - // Apply an opacity to the image. - if (modifiers?.dragging) { - ctx.globalAlpha *= 0.7; - } - - if (this.frame && this.avatarTransition > 0) { - ctx.save(); - ctx.globalAlpha *= this.avatarTransition; - - // Apply horizontal flip when rendering the preview. - const flip = - this.broadcast.source instanceof Publish.Broadcast && - this.broadcast.source.video.hd.config.peek()?.flip; - - if (flip) { - ctx.save(); - ctx.scale(-1, 1); - ctx.translate(-bounds.size.x, 0); - ctx.drawImage(this.frame, 0, 0, bounds.size.x, bounds.size.y); - ctx.restore(); - } else { - ctx.drawImage(this.frame, 0, 0, bounds.size.x, bounds.size.y); - } - ctx.restore(); - } - - if (this.avatarTransition < 1) { - ctx.save(); - ctx.globalAlpha *= 1 - this.avatarTransition; - - if (this.avatar.complete) { - ctx.drawImage(this.avatar, 0, 0, bounds.size.x, bounds.size.y); - } else { - ctx.fillRect(0, 0, bounds.size.x, bounds.size.y); - } - - ctx.restore(); - } - - const meme = this.broadcast.meme.peek(); - if (meme) { - if (meme.currentTime > 0) { - ctx.save(); - ctx.globalAlpha *= this.#memeOpacity; - - if (meme instanceof HTMLVideoElement) { - // Get the meme configuration - const memeName = this.broadcast.memeName.peek(); - let fit: "contain" | "cover" = "cover"; // default - let position = "center"; // default - - if (memeName) { - // Remove hyphens for lookup if needed - const lookupKey = memeName.toLowerCase().replace(/-/g, ""); - const memeKey = MEME_VIDEO_LOOKUP[lookupKey] || memeName; - const memeData = MEME_VIDEO[memeKey as MemeVideoName]; - if (memeData) { - fit = memeData.fit || "cover"; - position = memeData.position || "center"; - } - } - - const aspectRatio = meme.videoWidth / meme.videoHeight; - const boundsAspectRatio = bounds.size.x / bounds.size.y; - let width: number; - let height: number; - - if (fit === "contain") { - // Fit entire video within bounds (may have letterbox/pillarbox) - if (aspectRatio > boundsAspectRatio) { - // Video is wider than bounds - fit by width - width = bounds.size.x; - height = width / aspectRatio; - } else { - // Video is taller than bounds - fit by height - height = bounds.size.y; - width = height * aspectRatio; - } - } else { - // cover: fill the bounds (may crop) - if (aspectRatio > boundsAspectRatio) { - // Video is wider than bounds - use height to fill - height = bounds.size.y; - width = height * aspectRatio; - } else { - // Video is taller than bounds - use width to fill - width = bounds.size.x; - height = width / aspectRatio; - } - } - - // Parse position string (e.g., "center", "bottom", "bottom left", "50% 75%") - let xPos = 0.5; // default center - let yPos = 0.5; // default center - - const positionParts = 
position.toLowerCase().split(/\s+/); - for (const part of positionParts) { - if (part === "left") xPos = 0; - else if (part === "right") xPos = 1; - else if (part === "top") yPos = 0; - else if (part === "bottom") yPos = 1; - else if (part === "center") { - // Keep defaults - } else if (part.endsWith("%")) { - const value = parseFloat(part) / 100; - // Determine if this is x or y based on what we've seen - if (positionParts.length === 1) { - xPos = value; // Single value applies to x - } else if (positionParts.indexOf(part) === 0) { - xPos = value; // First value is x - } else { - yPos = value; // Second value is y - } - } - } - - // Calculate position based on alignment - const x = (bounds.size.x - width) * xPos; - const y = (bounds.size.y - height) * yPos; - - // Add a pixel in each direction to account for any rounding errors. - ctx.drawImage(meme, x - 1, y - 1, width + 2, height + 2); - } else { - // Get the emoji for this audio meme - const memeName = this.broadcast.memeName.peek(); - let emoji = "🔊"; // Default speaker emoji - - if (memeName) { - // Remove hyphens for lookup if needed - const lookupKey = memeName.toLowerCase().replace(/-/g, ""); - const memeKey = MEME_AUDIO_LOOKUP[lookupKey] || memeName; - const memeData = MEME_AUDIO[memeKey as keyof typeof MEME_AUDIO]; - if (memeData) { - emoji = memeData.emoji; - } - } - - const fontSize = Math.round(32 + 32 * scale); // round to avoid busting font caches - // Draw the emoji for this audio meme - ctx.font = `bold ${fontSize}px Arial`; - ctx.fillStyle = "white"; - // Render it at the bottom center of the bounds. - ctx.fillText(emoji, bounds.size.x / 2 - fontSize / 2, bounds.size.y - fontSize / 2); - } - - ctx.restore(); - } - - if (meme.ended || (meme.paused && meme.currentTime > 0)) { - this.#memeOpacity += -this.#memeOpacity * 0.1; - if (this.#memeOpacity <= 0) { - this.broadcast.meme.set(undefined); - this.broadcast.memeName.set(undefined); - } - } else { - this.#memeOpacity += (1 - this.#memeOpacity) * 0.1; - } - } - - // Cancel the clip - ctx.restore(); - - // Render the display name when hovering. - const targetOpacity = modifiers?.hovering ? 
1 : 0; - this.#nameOpacity += (targetOpacity - this.#nameOpacity) * 0.1; - - const name = this.broadcast.source.user.name.peek(); - - if (this.#nameOpacity > 0 && name) { - const fontSize = Math.round(Math.max(14 * scale, 10)); - ctx.save(); - ctx.globalAlpha *= this.#nameOpacity; - ctx.font = `bold ${fontSize}px Arial`; - ctx.fillStyle = "white"; - ctx.strokeStyle = "black"; - ctx.lineWidth = 2 * scale; - const offset = 12 * scale; - ctx.strokeText(name, offset, 2 * offset, bounds.size.x - 2 * offset); - ctx.fillText(name, offset, 2 * offset, bounds.size.x - 2 * offset); - ctx.restore(); - } - - ctx.restore(); - } - */ } From 668dc4947707a37ff7627418394aaeacbde35f45 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 06:48:23 -0700 Subject: [PATCH 12/19] gud --- app/src/index.tsx | 2 +- app/src/room/canvas.ts | 2 +- app/src/room/gl/background.ts | 4 ++-- app/src/room/gl/border.ts | 5 ----- app/src/room/gl/outline.ts | 8 ++++---- app/src/room/gl/shaders/border.frag | 13 +++---------- app/src/room/name.ts | 5 ++--- app/src/room/space.ts | 6 +++--- app/src/room/video.ts | 4 ++-- 9 files changed, 18 insertions(+), 31 deletions(-) diff --git a/app/src/index.tsx b/app/src/index.tsx index cfc6e126..154b2c7d 100644 --- a/app/src/index.tsx +++ b/app/src/index.tsx @@ -25,7 +25,7 @@ import { Canvas } from "./room/canvas"; import { Sup } from "./sup"; export function Hang(): JSX.Element { - const background = () as HTMLCanvasElement; + const background = () as HTMLCanvasElement; const canvas = new Canvas(background); onCleanup(() => canvas.close()); diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index 8065f64d..d85ddf08 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -139,7 +139,7 @@ export class Canvas { this.#glContext.clear(); // Render background with shader - this.#backgroundRenderer.render(); + this.#backgroundRenderer.render(now); // TODO: Render demo text if enabled // if (this.demo.peek()) { diff --git a/app/src/room/gl/background.ts b/app/src/room/gl/background.ts index ed1f9470..f4c6ada1 100644 --- a/app/src/room/gl/background.ts +++ b/app/src/room/gl/background.ts @@ -52,13 +52,13 @@ export class BackgroundRenderer { gl.bindVertexArray(null); } - render() { + render(now: DOMHighResTimeStamp) { const gl = this.#glContext.gl; const viewport = this.#glContext.viewport.peek(); this.#program.use(); this.#u_resolution.set(viewport.x, viewport.y); - this.#u_time.set(this.#glContext.uniforms.time); + this.#u_time.set(now); gl.bindVertexArray(this.#vao); gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); diff --git a/app/src/room/gl/border.ts b/app/src/room/gl/border.ts index a517b214..a3cd5b80 100644 --- a/app/src/room/gl/border.ts +++ b/app/src/room/gl/border.ts @@ -19,7 +19,6 @@ export class BorderRenderer { #u_radius: Uniform1f; #u_size: Uniform2f; #u_opacity: Uniform1f; - #u_border: Uniform1f; // Typed attributes #a_position: Attribute; @@ -35,7 +34,6 @@ export class BorderRenderer { this.#u_radius = this.#program.createUniform1f("u_radius"); this.#u_size = this.#program.createUniform2f("u_size"); this.#u_opacity = this.#program.createUniform1f("u_opacity"); - this.#u_border = this.#program.createUniform1f("u_border"); // Initialize typed attributes this.#a_position = this.#program.createAttribute("a_position"); @@ -124,9 +122,6 @@ export class BorderRenderer { const opacity = broadcast.video.online; this.#u_opacity.set(opacity); - // Set border size - this.#u_border.set(border); - // Draw gl.bindVertexArray(this.#vao); gl.drawElements(gl.TRIANGLES, 6, 
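 		// (Editor's note) 6 indices describe the quad's two triangles;
 		// UNSIGNED_SHORT assumes the 16-bit index buffer created when the
 		// renderer was constructed.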
gl.UNSIGNED_SHORT, 0); diff --git a/app/src/room/gl/outline.ts b/app/src/room/gl/outline.ts index be42473a..df0a984e 100644 --- a/app/src/room/gl/outline.ts +++ b/app/src/room/gl/outline.ts @@ -94,7 +94,7 @@ export class OutlineRenderer { gl.bindVertexArray(null); } - render(broadcast: Broadcast, camera: Camera, maxZ: number) { + render(broadcast: Broadcast, camera: Camera, maxZ: number, now: DOMHighResTimeStamp) { const gl = this.#canvas.gl; const bounds = broadcast.bounds.peek(); const scale = broadcast.zoom.peek(); @@ -102,6 +102,9 @@ export class OutlineRenderer { this.#program.use(); + // Set time + this.#u_time.set(now); + // Set projection matrix this.#u_projection.set(camera.projection); @@ -144,9 +147,6 @@ export class OutlineRenderer { // Set border size this.#u_border.set(border); - // Set time for animation - this.#u_time.set(this.#canvas.glContext.uniforms.time); - // Set color based on volume using HSL from old implementation // hue = 180 + volume * 120 const hue = 180 + volume * 120; diff --git a/app/src/room/gl/shaders/border.frag b/app/src/room/gl/shaders/border.frag index 2eb64433..c64250f2 100644 --- a/app/src/room/gl/shaders/border.frag +++ b/app/src/room/gl/shaders/border.frag @@ -24,27 +24,20 @@ void main() { // Calculate position from center of the bounds vec2 center = (v_pos - 0.5) * u_size; - // The video occupies the center: u_size - 2*border - vec2 videoSize = u_size - vec2(u_border * 2.0); - // Outer edge of the entire thing (edge of black border) float outerDist = roundedBoxSDF(center, u_size * 0.5, u_radius); - // Inner edge at video boundary - float videoDist = roundedBoxSDF(center, videoSize * 0.5, u_radius); - // Discard anything outside the outer bounds if (outerDist > 0.0) { discard; } - // Black border fills everything except outside the outer bounds + // Fill the entire area with black (no transparency gaps) vec3 color = vec3(0.0); float alpha = 1.0; - // Simple antialiasing - float edge = min(abs(videoDist), abs(outerDist)); - float aa = smoothstep(0.0, 1.0, edge); + // Antialiasing only on the outer edge + float aa = 1.0 - smoothstep(-1.0, 0.0, outerDist); fragColor = vec4(color, alpha * aa * u_opacity); } diff --git a/app/src/room/name.ts b/app/src/room/name.ts index 0c281d1f..32edd977 100644 --- a/app/src/room/name.ts +++ b/app/src/room/name.ts @@ -35,7 +35,7 @@ export class Name { }); // Update the position of the name when the broadcast bounds or viewport changes - const updatePosition = (bounds: Bounds, viewport: Vector, zoom: number) => { + const updatePosition = (bounds: Bounds, viewport: Vector) => { // Get the canvas element's position on the page const canvasRect = this.canvas.element.getBoundingClientRect(); @@ -72,8 +72,7 @@ export class Name { effect.effect((effect) => { const bounds = effect.get(this.broadcast.bounds); const viewport = effect.get(this.broadcast.canvas.viewport); - const zoom = effect.get(this.broadcast.zoom); - updatePosition(bounds, viewport, zoom); + updatePosition(bounds, viewport); }); // Update z-index based on broadcast position diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 90fcc78e..2db8e220 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -587,7 +587,7 @@ export class Space { } // Render using WebGL - #render() { + #render(now: DOMHighResTimeStamp) { // TODO: Render the audio click prompt if audio is suspended // if (this.sound.suspended.peek() && !this.profile) { // this.#renderAudioPrompt(); @@ -608,10 +608,10 @@ export class Space { // 2. 
Render audio visualizations (middle layer) for (const broadcast of this.#rip) { - this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ); + this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); } for (const broadcast of broadcasts) { - this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ); + this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); } // 3. Render video content (front layer) diff --git a/app/src/room/video.ts b/app/src/room/video.ts index d7f5605b..0147640d 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -92,12 +92,12 @@ export class Video { const gl = this.#gl; gl.bindTexture(gl.TEXTURE_2D, this.avatarTexture); - gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, avatar); - gl.generateMipmap(gl.TEXTURE_2D); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, avatar); + gl.generateMipmap(gl.TEXTURE_2D); gl.bindTexture(gl.TEXTURE_2D, null); } From f7e67062db7a2ff3cff0fe6475953e28b4e5edf1 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 06:52:37 -0700 Subject: [PATCH 13/19] Fix --- app/src/account.tsx | 4 +-- app/src/privacy.tsx | 77 ++++++++++++++++++++++++++++++++------------ app/src/settings.tsx | 20 ++++++------ app/vite.config.ts | 2 +- 4 files changed, 70 insertions(+), 33 deletions(-) diff --git a/app/src/account.tsx b/app/src/account.tsx index 47ad23a9..bda89205 100644 --- a/app/src/account.tsx +++ b/app/src/account.tsx @@ -114,8 +114,8 @@ function AccountLoad(): JSX.Element {

Delete Account?

- This action cannot be undone. Your account, profile information, and all associated data will be - permanently deleted. + This action cannot be undone. Your account, profile information, and all associated data + will be permanently deleted.

- ) + ); } diff --git a/app/src/settings.tsx b/app/src/settings.tsx index ea04989a..8f19a3f7 100644 --- a/app/src/settings.tsx +++ b/app/src/settings.tsx @@ -74,16 +74,18 @@ export const Settings = { // Rendering settings rendering: { - devicePixelRatio: new Signal((() => { - const stored = localStorage.getItem("settings.rendering.devicePixelRatio"); - if (stored) { - const parsed = Number.parseFloat(stored); - if (!Number.isNaN(parsed) && parsed > 0 && parsed <= window.devicePixelRatio) { - return parsed; + devicePixelRatio: new Signal( + (() => { + const stored = localStorage.getItem("settings.rendering.devicePixelRatio"); + if (stored) { + const parsed = Number.parseFloat(stored); + if (!Number.isNaN(parsed) && parsed > 0 && parsed <= window.devicePixelRatio) { + return parsed; + } } - } - return Math.max(1, window.devicePixelRatio / 2); - })()), + return Math.max(1, window.devicePixelRatio / 2); + })(), + ), }, clear: () => { diff --git a/app/vite.config.ts b/app/vite.config.ts index b0c73b8f..210ea336 100644 --- a/app/vite.config.ts +++ b/app/vite.config.ts @@ -5,7 +5,7 @@ import solid from "vite-plugin-solid"; import { viteStaticCopy } from "vite-plugin-static-copy"; // https://vitejs.dev/config/ -export default defineConfig(({ mode }) => { +export default defineConfig(() => { return { define: { TAURI: JSON.stringify(!!process.env.TAURI_ENV_PLATFORM), From 019a2e863da76746d6ce3a34e44a8379406633f9 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 09:25:57 -0700 Subject: [PATCH 14/19] ez. --- app/src/room/broadcast.ts | 1 - app/src/room/fake.ts | 14 +++- app/src/room/gl/broadcast.ts | 89 +++++++++++--------------- app/src/room/gl/shaders/broadcast.frag | 49 ++++++++++---- app/src/room/space.ts | 6 +- app/src/room/video.ts | 74 ++++++++------------- moq | 2 +- 7 files changed, 116 insertions(+), 119 deletions(-) diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts index 5e4d28aa..852aa402 100644 --- a/app/src/room/broadcast.ts +++ b/app/src/room/broadcast.ts @@ -192,7 +192,6 @@ export class Broadcast { } } - // TODO Also make scale a signal tick() { this.video.tick(); this.audio.tick(); diff --git a/app/src/room/fake.ts b/app/src/room/fake.ts index 582585f4..9e6faa7e 100644 --- a/app/src/room/fake.ts +++ b/app/src/room/fake.ts @@ -53,7 +53,7 @@ export class FakeBroadcast { }; video = { - frame: new Signal(undefined), + frame: new Signal(undefined), catalog: new Signal(undefined), detection: { enabled: new Signal(false), @@ -115,7 +115,17 @@ export class FakeBroadcast { video.play(); this.#video = video; - this.video.frame.set(video); + + const onFrame = () => { + if (!video.paused && !video.ended) { + this.video.frame.set(new VideoFrame(video)); + video.requestVideoFrameCallback(onFrame); + } else { + this.video.frame.set(undefined); + } + }; + + video.requestVideoFrameCallback(onFrame); video.onloadedmetadata = () => { this.video.catalog.set([ diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 14ca4f09..09bb5aa9 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -20,14 +20,15 @@ export class BroadcastRenderer { #u_radius: Uniform1f; #u_size: Uniform2f; #u_opacity: Uniform1f; - #u_avatarTransition: Uniform1f; - #u_texture: Uniform1i; - #u_hasTexture: Uniform1i; + #u_frameTransition: Uniform1f; + #u_frameTexture: Uniform1i; + #u_frameActive: Uniform1i; #u_avatarTexture: Uniform1i; - #u_hasAvatar: Uniform1i; + #u_avatarActive: Uniform1i; #u_memeTexture: Uniform1i; - #u_hasMeme: Uniform1i; - 
#u_memeOpacity: Uniform1f; + #u_memeActive: Uniform1i; + #u_now: Uniform1f; + #u_memeTransition: Uniform1f; #u_memeBounds: Uniform4f; // Typed attributes @@ -45,14 +46,15 @@ export class BroadcastRenderer { this.#u_radius = this.#program.createUniform1f("u_radius"); this.#u_size = this.#program.createUniform2f("u_size"); this.#u_opacity = this.#program.createUniform1f("u_opacity"); - this.#u_avatarTransition = this.#program.createUniform1f("u_avatarTransition"); - this.#u_texture = this.#program.createUniform1i("u_texture"); - this.#u_hasTexture = this.#program.createUniform1i("u_hasTexture"); + this.#u_frameTransition = this.#program.createUniform1f("u_frameTransition"); + this.#u_frameTexture = this.#program.createUniform1i("u_frameTexture"); + this.#u_frameActive = this.#program.createUniform1i("u_frameActive"); this.#u_avatarTexture = this.#program.createUniform1i("u_avatarTexture"); - this.#u_hasAvatar = this.#program.createUniform1i("u_hasAvatar"); + this.#u_avatarActive = this.#program.createUniform1i("u_avatarActive"); this.#u_memeTexture = this.#program.createUniform1i("u_memeTexture"); - this.#u_hasMeme = this.#program.createUniform1i("u_hasMeme"); - this.#u_memeOpacity = this.#program.createUniform1f("u_memeOpacity"); + this.#u_memeActive = this.#program.createUniform1i("u_memeActive"); + this.#u_now = this.#program.createUniform1f("u_now"); + this.#u_memeTransition = this.#program.createUniform1f("u_memeTransition"); this.#u_memeBounds = this.#program.createUniform4f("u_memeBounds"); // Initialize typed attributes @@ -133,17 +135,20 @@ export class BroadcastRenderer { broadcast: Broadcast, camera: Camera, maxZ: number, + now: number, modifiers?: { dragging?: boolean; hovering?: boolean; }, ) { + this.#program.use(); + + this.#u_now.set(now); + const gl = this.#canvas.gl; const bounds = broadcast.bounds.peek(); const scale = broadcast.zoom.peek(); - this.#program.use(); - // Set projection matrix this.#u_projection.set(camera.projection); @@ -168,51 +173,31 @@ export class BroadcastRenderer { } this.#u_opacity.set(opacity); - // Set avatar transition (0 = avatar, 1 = video) - this.#u_avatarTransition.set(broadcast.video.avatarTransition); - - // Bind video texture if available - const texture = broadcast.video.webcamTexture; - if (texture) { - gl.activeTexture(gl.TEXTURE0); - gl.bindTexture(gl.TEXTURE_2D, texture); - this.#u_texture.set(0); - this.#u_hasTexture.set(1); - } else { - this.#u_hasTexture.set(0); - } + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, broadcast.video.frameTexture); + this.#u_frameTexture.set(0); + this.#u_frameTransition.set(broadcast.video.frameTransition); + this.#u_frameActive.set(broadcast.video.frameActive ? 1 : 0); // Bind avatar texture if available - const avatarTexture = broadcast.video.avatarTexture; - if (avatarTexture) { - gl.activeTexture(gl.TEXTURE1); - gl.bindTexture(gl.TEXTURE_2D, avatarTexture); - this.#u_avatarTexture.set(1); - this.#u_hasAvatar.set(1); - } else { - this.#u_hasAvatar.set(0); - } + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, broadcast.video.avatarTexture); + this.#u_avatarTexture.set(1); + this.#u_avatarActive.set(broadcast.video.avatarSize ? 
1 : 0); // Bind meme texture if available - const meme = broadcast.meme.peek(); const memeTexture = broadcast.video.memeTexture; const memeBounds = broadcast.video.memeBounds; - if ( - meme instanceof HTMLVideoElement && - memeTexture && - meme.readyState >= meme.HAVE_CURRENT_DATA && - memeBounds - ) { - gl.activeTexture(gl.TEXTURE2); - gl.bindTexture(gl.TEXTURE_2D, memeTexture); - this.#u_memeTexture.set(2); - this.#u_hasMeme.set(1); - this.#u_memeOpacity.set(broadcast.video.memeOpacity); - - // Use pre-computed meme bounds from Video class + const memeTransition = broadcast.video.memeTransition; + + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, memeTexture); + this.#u_memeTexture.set(2); + this.#u_memeActive.set(broadcast.video.memeActive.peek() ? 1 : 0); + this.#u_memeTransition.set(memeTransition); + + if (memeBounds) { this.#u_memeBounds.set(memeBounds.position.x, memeBounds.position.y, memeBounds.size.x, memeBounds.size.y); - } else { - this.#u_hasMeme.set(0); } // Draw diff --git a/app/src/room/gl/shaders/broadcast.frag b/app/src/room/gl/shaders/broadcast.frag index 91961889..75acb842 100644 --- a/app/src/room/gl/shaders/broadcast.frag +++ b/app/src/room/gl/shaders/broadcast.frag @@ -4,17 +4,18 @@ precision highp float; in vec2 v_texCoord; in vec2 v_pos; -uniform sampler2D u_texture; +uniform sampler2D u_frameTexture; uniform sampler2D u_avatarTexture; uniform sampler2D u_memeTexture; -uniform bool u_hasTexture; -uniform bool u_hasAvatar; -uniform bool u_hasMeme; +uniform bool u_frameActive; +uniform bool u_memeActive; +uniform bool u_avatarActive; uniform float u_radius; uniform vec2 u_size; uniform float u_opacity; -uniform float u_avatarTransition; // 0 = avatar, 1 = video -uniform float u_memeOpacity; +uniform float u_frameTransition; // start time of avatar transition in milliseconds +uniform float u_now; +uniform float u_memeTransition; // start time of meme in milliseconds uniform vec4 u_memeBounds; // x, y, width, height in texture coordinates out vec4 fragColor; @@ -26,6 +27,8 @@ float roundedBoxSDF(vec2 center, vec2 size, float radius) { } void main() { + const float TRANSITION_DURATION = 300.0; // 300ms transition + // Calculate position from center vec2 center = (v_pos - 0.5) * u_size; @@ -40,27 +43,45 @@ void main() { // Smooth edge antialiasing float alpha = 1.0 - smoothstep(-1.0, 0.0, dist); + float frameElapsed = u_now - u_frameTransition; + float frameOpacity = 0.0; + + if (u_frameActive) { + frameOpacity = clamp(frameElapsed / TRANSITION_DURATION, 0.0, 1.0); + } else { + frameOpacity = 1.0 - clamp(frameElapsed / TRANSITION_DURATION, 0.0, 1.0); + } + // Sample textures - vec4 videoColor = u_hasTexture ? texture(u_texture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); - vec4 avatarColor = u_hasAvatar ? texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 frameColor = frameOpacity > 0.0 ? texture(u_frameTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 avatarColor = u_avatarActive && frameOpacity < 1.0 ? 
texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 baseColor = mix(avatarColor, frameColor, frameOpacity); + + // Compute meme opacity based on time and transition direction + float memeElapsed = u_now - u_memeTransition; + float memeOpacity = 0.0; - // Blend between avatar and video based on transition - vec4 baseColor = mix(avatarColor, videoColor, u_avatarTransition); + // Fade in + if (u_memeActive) { + memeOpacity = clamp(memeElapsed / TRANSITION_DURATION, 0.0, 1.0); + } else { + // Fade out + memeOpacity = 1.0 - clamp(memeElapsed / TRANSITION_DURATION, 0.0, 1.0); + } - // Apply meme overlay if present - if (u_hasMeme && u_memeOpacity > 0.0) { + if (memeOpacity > 0.0) { // Calculate the meme texture coordinates based on memeBounds // memeBounds contains the x, y offset and width, height scaling vec2 memeTexCoord = (v_texCoord - u_memeBounds.xy) / u_memeBounds.zw; // Only sample if we're within the meme bounds if (memeTexCoord.x >= 0.0 && memeTexCoord.x <= 1.0 && - memeTexCoord.y >= 0.0 && memeTexCoord.y <= 1.0) { + memeTexCoord.y >= 0.0 && memeTexCoord.y <= 1.0) { vec4 memeColor = texture(u_memeTexture, memeTexCoord); // Blend meme on top using alpha compositing // The meme uses WebM+VP9 with alpha channel for transparency - float memeAlpha = memeColor.a * u_memeOpacity; + float memeAlpha = memeColor.a * memeOpacity; baseColor.rgb = mix(baseColor.rgb, memeColor.rgb, memeAlpha); baseColor.a = max(baseColor.a, memeAlpha); } diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 2db8e220..8efb429b 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -616,13 +616,13 @@ export class Space { // 3. Render video content (front layer) for (const broadcast of this.#rip) { - this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ); + this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); } // Render all broadcasts (except dragging) for (const broadcast of broadcasts) { if (this.#dragging !== broadcast) { - this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, { + this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now, { hovering: this.#hovering === broadcast || this.profile, }); } @@ -630,7 +630,7 @@ export class Space { // Render the dragging broadcast last so it's always on top if (this.#dragging) { - this.#broadcastRenderer.render(this.#dragging, this.canvas.camera, this.#maxZ, { + this.#broadcastRenderer.render(this.#dragging, this.canvas.camera, this.#maxZ, now, { dragging: true, }); } diff --git a/app/src/room/video.ts b/app/src/room/video.ts index 0147640d..ee0068f8 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -15,9 +15,6 @@ export class Video { // The avatar image. avatar = new Image(); - // 1 when a video frame is fully rendered, 0 when their avatar is fully rendered. - avatarTransition = 0; - // The size of the avatar in pixels. avatarSize = new Signal(undefined); @@ -27,16 +24,20 @@ export class Video { // The opacity from 0 to 1, where 0 is offline and 1 is online. 
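// A minimal sketch (not part of the patch) of the time-based transition pattern
// behind the fields added just below: store a start timestamp plus a direction
// flag, and derive opacity from elapsed time. Assumes the 300ms
// TRANSITION_DURATION this patch's shader uses.
function transitionOpacity(now: DOMHighResTimeStamp, start: DOMHighResTimeStamp, active: boolean): number {
	const t = Math.min(Math.max((now - start) / 300, 0), 1); // clamp(elapsed / duration, 0, 1)
	return active ? t : 1 - t; // fade in while active, fade back out otherwise
}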
online = 0; - #memeOpacity = 0; + // Time-based transition tracking (in milliseconds) + memeTransition: DOMHighResTimeStamp = 0; // When meme started appearing/disappearing + frameTransition: DOMHighResTimeStamp = 0; + frameActive: boolean = false; // Signal that updates when meme video dimensions are loaded #memeSize = new Signal(undefined); // Cached meme bounds (x_offset, y_offset, width_scale, height_scale) memeBounds?: Bounds; + memeActive: Signal = new Signal(false); // WebGL textures for this broadcast - webcamTexture: WebGLTexture; // Video texture + frameTexture: WebGLTexture; // Video texture avatarTexture: WebGLTexture; // Avatar texture memeTexture: WebGLTexture; // Meme texture #gl: WebGL2RenderingContext; @@ -47,16 +48,17 @@ export class Video { this.#gl = broadcast.canvas.gl; // Create the textures - this.webcamTexture = this.#gl.createTexture(); + this.frameTexture = this.#gl.createTexture(); this.avatarTexture = this.#gl.createTexture(); this.memeTexture = this.#gl.createTexture(); // Set up texture upload effects - this.broadcast.signals.effect(this.#runWebcam.bind(this)); + this.broadcast.signals.effect(this.#runFrame.bind(this)); this.broadcast.signals.effect(this.#runMeme.bind(this)); this.broadcast.signals.effect(this.#runMemeBounds.bind(this)); this.broadcast.signals.effect(this.#runAvatar.bind(this)); this.broadcast.signals.effect(this.#runTargetSize.bind(this)); + this.broadcast.signals.effect(this.#runMemeTransition.bind(this)); } #runAvatar(effect: Effect) { @@ -126,31 +128,27 @@ export class Video { this.targetSize.set(Vector.create(128, 128)); } - #runWebcam(effect: Effect) { - if (this.broadcast.source instanceof FakeBroadcast) { - // TODO FakeBroadcast should return a VideoFrame instead of a HTMLVideoElement. - const video = effect.get(this.broadcast.source.video.frame); - if (!video) return; - this.#videoToTexture(effect, video, this.webcamTexture); - } else { - const frame = effect.get(this.broadcast.source.video.frame); - if (!frame) return; - this.#frameToTexture(frame, this.webcamTexture); + #runFrame(effect: Effect) { + const frame = effect.get(this.broadcast.source.video.frame); + + if (!!frame !== this.frameActive) { + this.frameTransition = performance.now(); + this.frameActive = !!frame; } + + if (frame) this.#frameToTexture(frame, this.frameTexture); } #runMeme(effect: Effect) { const meme = effect.get(this.broadcast.meme); - if (!meme) return; - - // Only handle video memes (audio memes are just sound effects) - if (!(meme instanceof HTMLVideoElement)) return; + if (!meme || !(meme instanceof HTMLVideoElement)) return; this.#videoToTexture(effect, meme, this.memeTexture); // Listen for loadedmetadata event to update meme size when dimensions are available const updateSize = () => { if (meme.videoWidth > 0 && meme.videoHeight > 0) { + this.memeActive.set(true); effect.set(this.#memeSize, Vector.create(meme.videoWidth, meme.videoHeight)); } }; @@ -162,6 +160,14 @@ export class Video { // Listen for metadata load effect.event(meme, "loadedmetadata", updateSize); + effect.event(meme, "ended", () => { + this.memeActive.set(false); + }); + } + + #runMemeTransition(effect: Effect) { + effect.get(this.memeActive); + this.memeTransition = performance.now(); } #runMemeBounds(effect: Effect) { @@ -286,44 +292,20 @@ export class Video { } tick() { - if (this.broadcast.source.video.frame.peek()) { - this.avatarTransition = Math.min(this.avatarTransition + 0.05, 1); - } else { - this.avatarTransition = Math.max(this.avatarTransition - 0.05, 0); - } - if 
(this.broadcast.visible.peek()) { this.online += (1 - this.online) * 0.1; } else { this.online += (0 - this.online) * 0.1; } - // Update meme opacity - const meme = this.broadcast.meme.peek(); - if (meme) { - if (meme.ended || (meme.paused && meme.currentTime > 0)) { - this.#memeOpacity += -this.#memeOpacity * 0.1; - if (this.#memeOpacity <= 0) { - this.broadcast.meme.set(undefined); - this.broadcast.memeName.set(undefined); - } - } else { - this.#memeOpacity += (1 - this.#memeOpacity) * 0.1; - } - } - /* const ZOOM_SPEED = 0.005; this.#zoom = this.#zoom.lerp(this.#zoomTarget, ZOOM_SPEED); */ } - get memeOpacity(): number { - return this.#memeOpacity; - } - close() { - this.#gl.deleteTexture(this.webcamTexture); + this.#gl.deleteTexture(this.frameTexture); this.#gl.deleteTexture(this.avatarTexture); this.#gl.deleteTexture(this.memeTexture); } diff --git a/moq b/moq index 20beccf7..10bc4d28 160000 --- a/moq +++ b/moq @@ -1 +1 @@ -Subproject commit 20beccf730629bf980b4b245a21c2ab56b00188d +Subproject commit 10bc4d287d9605a01469ba82b263f60c21352da3 From aa406187a34f11eafc46b012b50233e45dc3147b Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 10:48:14 -0700 Subject: [PATCH 15/19] ez --- app/src/room/audio.ts | 2 +- app/src/room/broadcast.ts | 29 +++-- app/src/room/canvas.ts | 4 +- app/src/room/gl/border.ts | 3 +- app/src/room/gl/broadcast.ts | 29 ++--- app/src/room/gl/outline.ts | 8 +- app/src/room/gl/shaders/broadcast.frag | 42 ++----- app/src/room/gl/shaders/outline.frag | 4 +- app/src/room/local.ts | 12 -- app/src/room/meme.ts | 24 +++- app/src/room/sound.ts | 28 +++-- app/src/room/space.ts | 18 ++- app/src/room/video.ts | 162 +++++++++++++++++-------- app/src/settings.tsx | 14 +-- 14 files changed, 221 insertions(+), 158 deletions(-) diff --git a/app/src/room/audio.ts b/app/src/room/audio.ts index b97aefca..1452f378 100644 --- a/app/src/room/audio.ts +++ b/app/src/room/audio.ts @@ -44,7 +44,7 @@ export class Audio { const meme = effect.get(this.broadcast.meme); if (!meme) return; - const source = new MediaElementAudioSourceNode(this.sound.context, { mediaElement: meme }); + const source = new MediaElementAudioSourceNode(this.sound.context, { mediaElement: meme.element }); // Use the existing notifications context so we don't need to create our own panner/volume. this.sound.connect(source); diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts index 852aa402..d8476aea 100644 --- a/app/src/room/broadcast.ts +++ b/app/src/room/broadcast.ts @@ -6,6 +6,7 @@ import { Captions } from "./captions"; import { Chat } from "./chat"; import { FakeBroadcast } from "./fake"; import { Bounds, Vector } from "./geometry"; +import { Meme } from "./meme"; import { Name } from "./name"; import { Sound } from "./sound"; import { Video } from "./video"; @@ -63,12 +64,17 @@ export class Broadcast { position: Signal; // The meme video/audio we're rendering, if any. 
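// Illustrative sketch (not from the diff) of the swap pattern used when this
// signal changes: Signal.update hands back the previous value so the outgoing
// element can be paused before the replacement starts playing. `swapMeme` is a
// hypothetical helper; Meme pairs a source with its media element.
function swapMeme(signal: Signal<Meme | undefined>, next: Meme | undefined): void {
	signal.update((prev) => {
		prev?.element.pause(); // stop the old meme before dropping the reference
		return next;
	});
}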
- meme = new Signal(undefined); - memeName = new Signal(undefined); + meme = new Signal(undefined); scale: Signal; // room scale, 1 is 100% zoom = new Signal(1.0); // local zoom, 1 is 100% + online = new Signal(true); // false is offline, true is online + #onlineTransition: DOMHighResTimeStamp = 0; + + // Computed opacity based on online fade-in/fade-out (0-1) + opacity: number = 1; + signals = new Effect(); constructor(props: BroadcastProps) { @@ -152,11 +158,9 @@ export class Broadcast { const meme = this.audio.sound.meme(memeName); if (meme) { this.meme.update((prev) => { - prev?.pause(); + prev?.element.pause(); return meme; }); - this.memeName.set(memeName); - return; } } @@ -192,9 +196,14 @@ export class Broadcast { } } - tick() { - this.video.tick(); + tick(now: DOMHighResTimeStamp) { this.audio.tick(); + this.video.tick(now); + + // Update opacity based on online status + const fadeTime = 300; // ms + const elapsed = now - this.#onlineTransition; + this.opacity = this.online.peek() ? Math.min(1, elapsed / fadeTime) : Math.max(0, 1 - elapsed / fadeTime); const bounds = this.bounds.peek(); const viewport = this.canvas.viewport.peek(); @@ -281,6 +290,12 @@ export class Broadcast { return false; } + // Called when online status changes to trigger fade transition + setOnline(online: boolean) { + this.online.set(online); + this.#onlineTransition = performance.now(); + } + close() { this.signals.close(); this.audio.close(); diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index d85ddf08..1b446d70 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -55,7 +55,7 @@ export class Canvas { const resize = (entries: ResizeObserverEntry[]) => { for (const entry of entries) { // Get device pixel dimensions using the user's configured ratio - const dpr = Settings.rendering.devicePixelRatio.peek(); + const dpr = Settings.render.scale.peek(); // Always use contentBoxSize and multiply by our custom ratio // to ensure we respect the user's setting @@ -104,7 +104,7 @@ export class Canvas { this.#signals.event(document, "visibilitychange", visible); // Trigger resize when devicePixelRatio setting changes - this.#signals.subscribe(Settings.rendering.devicePixelRatio, () => { + this.#signals.subscribe(Settings.render.scale, () => { // Force a resize by temporarily disconnecting and reconnecting resizeObserver.disconnect(); resizeObserver.observe(this.#canvas, { box: "content-box" }); diff --git a/app/src/room/gl/border.ts b/app/src/room/gl/border.ts index a3cd5b80..c54f4116 100644 --- a/app/src/room/gl/border.ts +++ b/app/src/room/gl/border.ts @@ -119,8 +119,7 @@ export class BorderRenderer { this.#u_size.set(bounds.size.x + border * 2, bounds.size.y + border * 2); // Set opacity - const opacity = broadcast.video.online; - this.#u_opacity.set(opacity); + this.#u_opacity.set(broadcast.opacity); // Draw gl.bindVertexArray(this.#vao); diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 09bb5aa9..36414cf1 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -20,15 +20,12 @@ export class BroadcastRenderer { #u_radius: Uniform1f; #u_size: Uniform2f; #u_opacity: Uniform1f; - #u_frameTransition: Uniform1f; + #u_frameOpacity: Uniform1f; #u_frameTexture: Uniform1i; - #u_frameActive: Uniform1i; #u_avatarTexture: Uniform1i; #u_avatarActive: Uniform1i; #u_memeTexture: Uniform1i; - #u_memeActive: Uniform1i; - #u_now: Uniform1f; - #u_memeTransition: Uniform1f; + #u_memeOpacity: Uniform1f; #u_memeBounds: Uniform4f; // Typed attributes @@ 
-46,15 +43,12 @@ export class BroadcastRenderer { this.#u_radius = this.#program.createUniform1f("u_radius"); this.#u_size = this.#program.createUniform2f("u_size"); this.#u_opacity = this.#program.createUniform1f("u_opacity"); - this.#u_frameTransition = this.#program.createUniform1f("u_frameTransition"); + this.#u_frameOpacity = this.#program.createUniform1f("u_frameOpacity"); this.#u_frameTexture = this.#program.createUniform1i("u_frameTexture"); - this.#u_frameActive = this.#program.createUniform1i("u_frameActive"); this.#u_avatarTexture = this.#program.createUniform1i("u_avatarTexture"); this.#u_avatarActive = this.#program.createUniform1i("u_avatarActive"); this.#u_memeTexture = this.#program.createUniform1i("u_memeTexture"); - this.#u_memeActive = this.#program.createUniform1i("u_memeActive"); - this.#u_now = this.#program.createUniform1f("u_now"); - this.#u_memeTransition = this.#program.createUniform1f("u_memeTransition"); + this.#u_memeOpacity = this.#program.createUniform1f("u_memeOpacity"); this.#u_memeBounds = this.#program.createUniform4f("u_memeBounds"); // Initialize typed attributes @@ -135,7 +129,6 @@ export class BroadcastRenderer { broadcast: Broadcast, camera: Camera, maxZ: number, - now: number, modifiers?: { dragging?: boolean; hovering?: boolean; @@ -143,8 +136,6 @@ export class BroadcastRenderer { ) { this.#program.use(); - this.#u_now.set(now); - const gl = this.#canvas.gl; const bounds = broadcast.bounds.peek(); const scale = broadcast.zoom.peek(); @@ -167,17 +158,20 @@ export class BroadcastRenderer { this.#u_size.set(bounds.size.x, bounds.size.y); // Set opacity - let opacity = broadcast.video.online; + let opacity = broadcast.opacity; if (modifiers?.dragging) { opacity *= 0.7; } + this.#u_opacity.set(opacity); + // Set pre-computed opacity values + this.#u_frameOpacity.set(broadcast.video.frameOpacity); + this.#u_memeOpacity.set(broadcast.video.memeOpacity); + gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, broadcast.video.frameTexture); this.#u_frameTexture.set(0); - this.#u_frameTransition.set(broadcast.video.frameTransition); - this.#u_frameActive.set(broadcast.video.frameActive ? 1 : 0); // Bind avatar texture if available gl.activeTexture(gl.TEXTURE1); @@ -188,13 +182,10 @@ export class BroadcastRenderer { // Bind meme texture if available const memeTexture = broadcast.video.memeTexture; const memeBounds = broadcast.video.memeBounds; - const memeTransition = broadcast.video.memeTransition; gl.activeTexture(gl.TEXTURE2); gl.bindTexture(gl.TEXTURE_2D, memeTexture); this.#u_memeTexture.set(2); - this.#u_memeActive.set(broadcast.video.memeActive.peek() ? 
1 : 0); - this.#u_memeTransition.set(memeTransition); if (memeBounds) { this.#u_memeBounds.set(memeBounds.position.x, memeBounds.position.y, memeBounds.size.x, memeBounds.size.y); diff --git a/app/src/room/gl/outline.ts b/app/src/room/gl/outline.ts index df0a984e..08a0eca4 100644 --- a/app/src/room/gl/outline.ts +++ b/app/src/room/gl/outline.ts @@ -23,6 +23,7 @@ export class OutlineRenderer { #u_border: Uniform1f; #u_color: Uniform3f; #u_time: Uniform1f; + #u_finalAlpha: Uniform1f; // Typed attributes #a_position: Attribute; @@ -42,6 +43,7 @@ export class OutlineRenderer { this.#u_border = this.#program.createUniform1f("u_border"); this.#u_color = this.#program.createUniform3f("u_color"); this.#u_time = this.#program.createUniform1f("u_time"); + this.#u_finalAlpha = this.#program.createUniform1f("u_finalAlpha"); // Initialize typed attributes this.#a_position = this.#program.createAttribute("a_position"); @@ -138,7 +140,7 @@ export class OutlineRenderer { this.#u_size.set(bounds.size.x + maxExpansion * 2, bounds.size.y + maxExpansion * 2); // Apply opacity based on volume and video online status - const opacity = broadcast.video.online ? Math.min(10 * volume, 1) : 0; + const opacity = Math.min(10 * volume, 1) * broadcast.opacity; this.#u_opacity.set(opacity); // Set volume (smoothed, from 0-1) @@ -147,6 +149,10 @@ export class OutlineRenderer { // Set border size this.#u_border.set(border); + // Compute final alpha once in TypeScript instead of per pixel + const finalAlpha = 0.3 + volume * 0.4; + this.#u_finalAlpha.set(finalAlpha); + // Set color based on volume using HSL from old implementation // hue = 180 + volume * 120 const hue = 180 + volume * 120; diff --git a/app/src/room/gl/shaders/broadcast.frag b/app/src/room/gl/shaders/broadcast.frag index 75acb842..eb608a45 100644 --- a/app/src/room/gl/shaders/broadcast.frag +++ b/app/src/room/gl/shaders/broadcast.frag @@ -7,15 +7,12 @@ in vec2 v_pos; uniform sampler2D u_frameTexture; uniform sampler2D u_avatarTexture; uniform sampler2D u_memeTexture; -uniform bool u_frameActive; -uniform bool u_memeActive; uniform bool u_avatarActive; uniform float u_radius; uniform vec2 u_size; uniform float u_opacity; -uniform float u_frameTransition; // start time of avatar transition in milliseconds -uniform float u_now; -uniform float u_memeTransition; // start time of meme in milliseconds +uniform float u_frameOpacity; // Pre-computed frame opacity (0-1) +uniform float u_memeOpacity; // Pre-computed meme opacity (0-1) uniform vec4 u_memeBounds; // x, y, width, height in texture coordinates out vec4 fragColor; @@ -27,8 +24,6 @@ float roundedBoxSDF(vec2 center, vec2 size, float radius) { } void main() { - const float TRANSITION_DURATION = 300.0; // 300ms transition - // Calculate position from center vec2 center = (v_pos - 0.5) * u_size; @@ -43,33 +38,12 @@ void main() { // Smooth edge antialiasing float alpha = 1.0 - smoothstep(-1.0, 0.0, dist); - float frameElapsed = u_now - u_frameTransition; - float frameOpacity = 0.0; - - if (u_frameActive) { - frameOpacity = clamp(frameElapsed / TRANSITION_DURATION, 0.0, 1.0); - } else { - frameOpacity = 1.0 - clamp(frameElapsed / TRANSITION_DURATION, 0.0, 1.0); - } - - // Sample textures - vec4 frameColor = frameOpacity > 0.0 ? texture(u_frameTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); - vec4 avatarColor = u_avatarActive && frameOpacity < 1.0 ? 
texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); - vec4 baseColor = mix(avatarColor, frameColor, frameOpacity); - - // Compute meme opacity based on time and transition direction - float memeElapsed = u_now - u_memeTransition; - float memeOpacity = 0.0; - - // Fade in - if (u_memeActive) { - memeOpacity = clamp(memeElapsed / TRANSITION_DURATION, 0.0, 1.0); - } else { - // Fade out - memeOpacity = 1.0 - clamp(memeElapsed / TRANSITION_DURATION, 0.0, 1.0); - } + // Sample textures using pre-computed opacity values + vec4 frameColor = u_frameOpacity > 0.0 ? texture(u_frameTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 avatarColor = u_avatarActive && u_frameOpacity < 1.0 ? texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 baseColor = mix(avatarColor, frameColor, u_frameOpacity); - if (memeOpacity > 0.0) { + if (u_memeOpacity > 0.0) { // Calculate the meme texture coordinates based on memeBounds // memeBounds contains the x, y offset and width, height scaling vec2 memeTexCoord = (v_texCoord - u_memeBounds.xy) / u_memeBounds.zw; @@ -81,7 +55,7 @@ void main() { // Blend meme on top using alpha compositing // The meme uses WebM+VP9 with alpha channel for transparency - float memeAlpha = memeColor.a * memeOpacity; + float memeAlpha = memeColor.a * u_memeOpacity; baseColor.rgb = mix(baseColor.rgb, memeColor.rgb, memeAlpha); baseColor.a = max(baseColor.a, memeAlpha); } diff --git a/app/src/room/gl/shaders/outline.frag b/app/src/room/gl/shaders/outline.frag index 007dc661..69f4a15c 100644 --- a/app/src/room/gl/shaders/outline.frag +++ b/app/src/room/gl/shaders/outline.frag @@ -10,6 +10,7 @@ uniform float u_volume; // Audio volume 0-1 (smoothed) uniform float u_border; // Border size in pixels uniform vec3 u_color; // RGB color for the volume indicator uniform float u_time; // Time in seconds for animation +uniform float u_finalAlpha; // Pre-computed final alpha (0.3 + volume * 0.4) out vec4 fragColor; @@ -80,7 +81,6 @@ void main() { // In the colored region vec3 finalColor = u_color; - float finalAlpha = 0.3 + u_volume * 0.4; // Create a sharp line with AA on edges, inset from video boundary float innerEdge = videoDist + lineInset; // Offset inward @@ -98,5 +98,5 @@ void main() { // Combine: AA at inner edge, full in middle, AA at outer edge float aa = innerAA * mix(outerAA, 1.0, lineMask); - fragColor = vec4(finalColor, finalAlpha * aa * u_opacity); + fragColor = vec4(finalColor, u_finalAlpha * aa * u_opacity); } diff --git a/app/src/room/local.ts b/app/src/room/local.ts index 96c44d61..d14f17c7 100644 --- a/app/src/room/local.ts +++ b/app/src/room/local.ts @@ -208,18 +208,6 @@ export class Local { }); */ - // Say hi when the user joins - this.#signals.effect((effect) => { - const name = effect.get(Settings.account.name); - if (!name) return; - - // This is enabled on join. - const enabled = effect.get(this.camera.enabled); - if (!enabled) return; - - this.sound.tts.joined(name); - }); - // Use the provided camera and screen broadcasts this.camera.signals.effect((effect) => { if (effect.get(this.camera.video.source) || effect.get(this.camera.audio.source)) { diff --git a/app/src/room/meme.ts b/app/src/room/meme.ts index 518489f7..01e3de3f 100644 --- a/app/src/room/meme.ts +++ b/app/src/room/meme.ts @@ -1,9 +1,13 @@ -export type MemeAudio = { +type MemeAudioSource = { file: string; emoji: string; + + // These are not used for audio, but are defined to make it easier to use the same type for both. 
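// Sketch of why the shared optional fields help (assumed usage, not from the
// patch): with fit/position present on both source shapes, rendering code can
// read them without narrowing, while an element check still distinguishes the
// two meme kinds. `isVideoMeme` is a hypothetical guard.
function isVideoMeme(meme: Meme): meme is MemeVideo {
	return meme.element instanceof HTMLVideoElement; // MemeAudio holds an HTMLAudioElement
}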
+ fit?: "contain"; + position?: string; }; -export type MemeVideo = { +type MemeVideoSource = { file: string; // CSS object-fit: how the video fits within its container // - "contain": scales to fit entirely within container (may have letterbox/pillarbox) @@ -17,6 +21,18 @@ export type MemeVideo = { position?: string; }; +export interface MemeVideo { + source: MemeVideoSource; + element: HTMLVideoElement; +} + +export interface MemeAudio { + source: MemeAudioSource; + element: HTMLAudioElement; +} + +export type Meme = MemeVideo | MemeAudio; + export const MEME_AUDIO = { "among-us": { file: "among-us.mp3", emoji: "📮" }, aww: { file: "aww.mp3", emoji: "🥺" }, @@ -55,7 +71,7 @@ export const MEME_AUDIO = { boom: { file: "boom.mp3", emoji: "💥" }, wow: { file: "wow.mp3", emoji: "😮" }, yay: { file: "yay.mp3", emoji: "🎉" }, -} as const satisfies Record; +} as const satisfies Record; export const MEME_VIDEO = { "another-one": { file: "another-one.webm", fit: "cover", position: "bottom" }, @@ -92,7 +108,7 @@ export const MEME_VIDEO = { "real-estate": { file: "real-estate.webm", fit: "cover", position: "center" }, // fit height, left/right can letterbox waw: { file: "waw.webm", fit: "cover", position: "bottom" }, zzz: { file: "zzz.webm", fit: "cover", position: "center" }, -} as const satisfies Record; +} as const satisfies Record; export type MemeAudioName = keyof typeof MEME_AUDIO; export type MemeVideoName = keyof typeof MEME_VIDEO; diff --git a/app/src/room/sound.ts b/app/src/room/sound.ts index 6bc6b78e..3a8bd331 100644 --- a/app/src/room/sound.ts +++ b/app/src/room/sound.ts @@ -132,7 +132,15 @@ export class Sound { } } -import { MEME_AUDIO, MEME_AUDIO_LOOKUP, MEME_VIDEO, MEME_VIDEO_LOOKUP, MemeAudioName, MemeVideoName } from "./meme"; +import { + MEME_AUDIO, + MEME_AUDIO_LOOKUP, + MEME_VIDEO, + MEME_VIDEO_LOOKUP, + Meme, + MemeAudioName, + MemeVideoName, +} from "./meme"; export class PannedNotifications { #parent: Sound; @@ -183,7 +191,7 @@ export class PannedNotifications { // NOTE: We don't cache elements because the browser will. // Otherwise it would be a pain in the butt to manage if the same meme is played simultaneously. 
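// Sketch of the lookup normalization meme() performs below: lowercase the name,
// strip hyphens to index the LOOKUP tables (which, per the code below, map the
// squashed form back to a canonical key), then fall back to the lowercase name
// as a direct key. `memeKeys` is an illustrative helper, not part of the patch.
function memeKeys(name: string): { video: MemeVideoName; audio: MemeAudioName } {
	const lower = name.toLowerCase();
	const squashed = lower.replace(/-/g, "");
	return {
		video: MEME_VIDEO_LOOKUP[squashed] || (lower as MemeVideoName),
		audio: MEME_AUDIO_LOOKUP[squashed] || (lower as MemeAudioName),
	};
}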
- meme(name: string): HTMLAudioElement | HTMLVideoElement | undefined { + meme(name: string): Meme | undefined { // Make the name lowercase and remove hyphens for lookup const lower = name.toLowerCase(); const lookupKey = lower.replace(/-/g, ""); @@ -192,13 +200,13 @@ export class PannedNotifications { const videoKey = MEME_VIDEO_LOOKUP[lookupKey] || (lower as MemeVideoName); const audioKey = MEME_AUDIO_LOOKUP[lookupKey] || (lower as MemeAudioName); - const videoData = MEME_VIDEO[videoKey]; - const audioData = MEME_AUDIO[audioKey]; + const videoSource = MEME_VIDEO[videoKey]; + const audioSource = MEME_AUDIO[audioKey]; // Use the video if it's available - if (videoData) { + if (videoSource) { const video = document.createElement("video") as HTMLVideoElement; - video.src = `/meme/${videoData.file}`; + video.src = `/meme/${videoSource.file}`; if (this.#parent.suspended.peek()) { video.muted = true; // so we can start loading @@ -210,14 +218,14 @@ export class PannedNotifications { video.autoplay = true; video.load(); video.play(); - return video; + return { element: video, source: videoSource }; } - if (audioData) { - const audio = new Audio(`/meme/${audioData.file}`); + if (audioSource) { + const audio = new Audio(`/meme/${audioSource.file}`); audio.autoplay = true; audio.load(); - return audio; + return { element: audio, source: audioSource }; } return undefined; diff --git a/app/src/room/space.ts b/app/src/room/space.ts index 8efb429b..c35555df 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -498,6 +498,8 @@ export class Space { const name = effect.get(broadcast.source.user.name); if (!name) return; + if (name.endsWith("(screen)")) return; + this.sound.tts.joined(name); }); @@ -507,6 +509,8 @@ export class Space { const name = effect.get(broadcast.source.user.name); if (!name) return; + if (name.endsWith("(screen)")) return; + this.sound.tts.left(name); }); @@ -519,6 +523,8 @@ export class Space { throw new Error(`broadcast not found: ${path}`); } + broadcast.setOnline(false); + this.lookup.delete(path); // Move it from the main list to the rip list. @@ -546,14 +552,16 @@ export class Space { // Tick physics separately from rendering #tickAll() { + const now = performance.now(); + for (const broadcast of this.#rip) { - broadcast.tick(); + broadcast.tick(now); } const broadcasts = this.ordered.peek(); for (const broadcast of broadcasts) { - broadcast.tick(); + broadcast.tick(now); } // Check for collisions. @@ -616,13 +624,13 @@ export class Space { // 3. 
Render video content (front layer) for (const broadcast of this.#rip) { - this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now); + this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ); } // Render all broadcasts (except dragging) for (const broadcast of broadcasts) { if (this.#dragging !== broadcast) { - this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now, { + this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, { hovering: this.#hovering === broadcast || this.profile, }); } @@ -630,7 +638,7 @@ export class Space { // Render the dragging broadcast last so it's always on top if (this.#dragging) { - this.#broadcastRenderer.render(this.#dragging, this.canvas.camera, this.#maxZ, now, { + this.#broadcastRenderer.render(this.#dragging, this.canvas.camera, this.#maxZ, { dragging: true, }); } diff --git a/app/src/room/video.ts b/app/src/room/video.ts index ee0068f8..1e0ed056 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -1,9 +1,8 @@ import { Effect, Signal } from "@kixelated/signals"; import * as Api from "../api"; +import Settings from "../settings"; import type { Broadcast } from "./broadcast"; -import { FakeBroadcast } from "./fake"; import { Bounds, Vector } from "./geometry"; -import { MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "./meme"; //export type VideoSource = Watch.Video.Source | Publish.Video.Encoder; @@ -21,14 +20,15 @@ export class Video { // The desired size of the video in pixels. targetSize = new Signal(Vector.create(128, 128)); - // The opacity from 0 to 1, where 0 is offline and 1 is online. - online = 0; - // Time-based transition tracking (in milliseconds) - memeTransition: DOMHighResTimeStamp = 0; // When meme started appearing/disappearing - frameTransition: DOMHighResTimeStamp = 0; + #memeTransition: DOMHighResTimeStamp = 0; // When meme started appearing/disappearing + #frameTransition: DOMHighResTimeStamp = 0; frameActive: boolean = false; + // Computed opacity values (calculated once per frame instead of per pixel) + frameOpacity: number = 0; + memeOpacity: number = 0; + // Signal that updates when meme video dimensions are loaded #memeSize = new Signal(undefined); @@ -42,6 +42,9 @@ export class Video { memeTexture: WebGLTexture; // Meme texture #gl: WebGL2RenderingContext; + // Render avatars and emojis at this size + #renderSize = new Signal(128); + constructor(broadcast: Broadcast) { this.broadcast = broadcast; @@ -59,6 +62,8 @@ export class Video { this.broadcast.signals.effect(this.#runAvatar.bind(this)); this.broadcast.signals.effect(this.#runTargetSize.bind(this)); this.broadcast.signals.effect(this.#runMemeTransition.bind(this)); + + this.broadcast.signals.effect(this.#runRenderSize.bind(this)); } #runAvatar(effect: Effect) { @@ -77,9 +82,9 @@ export class Video { // For SVGs, load at higher resolution to avoid pixelation // Set a reasonable size (e.g., 512x512) for better quality if (avatar.endsWith(".svg")) { - // TODO Automatically adjust? 
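// Sketch of the #renderSize rounding that replaces the hardcoded 512 later in
// this patch: take the smaller edge of the bounds, scale by the render setting,
// and round up to the next power of two so texture sizes stay GPU-friendly.
// The Math.max(1, ...) guard against log2(0) is an assumption added here; the
// diff itself has no zero guard.
function nextPow2(size: number): number {
	return 2 ** Math.ceil(Math.log2(Math.max(1, size)));
}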
- newAvatar.width = 512; - newAvatar.height = 512; + const size = effect.get(this.#renderSize); + newAvatar.width = size; + newAvatar.height = size; } newAvatar.src = avatar; @@ -132,7 +137,7 @@ export class Video { const frame = effect.get(this.broadcast.source.video.frame); if (!!frame !== this.frameActive) { - this.frameTransition = performance.now(); + this.#frameTransition = performance.now(); this.frameActive = !!frame; } @@ -141,38 +146,56 @@ export class Video { #runMeme(effect: Effect) { const meme = effect.get(this.broadcast.meme); - if (!meme || !(meme instanceof HTMLVideoElement)) return; + if (!meme) { + this.memeActive.set(false); + return; + } + + const element = meme.element; + + effect.event(element, "ended", () => { + this.memeActive.set(false); + }); - this.#videoToTexture(effect, meme, this.memeTexture); + if (element instanceof HTMLVideoElement) { + this.#videoToTexture(effect, element, this.memeTexture); - // Listen for loadedmetadata event to update meme size when dimensions are available - const updateSize = () => { - if (meme.videoWidth > 0 && meme.videoHeight > 0) { - this.memeActive.set(true); - effect.set(this.#memeSize, Vector.create(meme.videoWidth, meme.videoHeight)); + // Listen for loadedmetadata event to update meme size when dimensions are available + const updateSize = () => { + if (element.videoWidth > 0 && element.videoHeight > 0) { + this.memeActive.set(true); + effect.set(this.#memeSize, Vector.create(element.videoWidth, element.videoHeight)); + } + }; + + // Check if already loaded + if (element.readyState >= 1) { + updateSize(); } - }; - // Check if already loaded - if (meme.readyState >= 1) { - updateSize(); - } + // Listen for metadata load + effect.event(element, "loadedmetadata", updateSize); + } else if ("emoji" in meme.source) { + const emoji = meme.source.emoji; - // Listen for metadata load - effect.event(meme, "loadedmetadata", updateSize); - effect.event(meme, "ended", () => { - this.memeActive.set(false); - }); + effect.effect((effect) => { + // Audio meme - render emoji to texture + const size = effect.get(this.#renderSize); + this.#emojiToTexture(emoji, size); + }); + + this.memeActive.set(true); + } } #runMemeTransition(effect: Effect) { effect.get(this.memeActive); - this.memeTransition = performance.now(); + this.#memeTransition = performance.now(); } #runMemeBounds(effect: Effect) { const meme = effect.get(this.broadcast.meme); - if (!meme || !(meme instanceof HTMLVideoElement)) return; + if (!meme) return; // Wait until meme dimensions are available const memeSize = effect.get(this.#memeSize); @@ -181,20 +204,8 @@ export class Video { // Also react to bounds changes const bounds = effect.get(this.broadcast.bounds); - // Get meme configuration - const memeName = effect.get(this.broadcast.memeName); - let fit: "contain" | "cover" = "cover"; - let position = "center"; - - if (memeName) { - const lookupKey = memeName.toLowerCase().replace(/-/g, ""); - const memeKey = MEME_VIDEO_LOOKUP[lookupKey] || memeName; - const memeData = MEME_VIDEO[memeKey as MemeVideoName]; - if (memeData) { - fit = memeData.fit || "cover"; - position = memeData.position || "center"; - } - } + const fit = meme.source.fit || "cover"; + const position = meme.source.position || "center"; // Calculate meme bounds based on fit and position const aspectRatio = memeSize.x / memeSize.y; @@ -291,17 +302,64 @@ export class Video { effect.cleanup(() => src.cancelVideoFrameCallback(cancel)); } - tick() { - if (this.broadcast.visible.peek()) { - this.online += (1 - 
this.online) * 0.1; + #emojiToTexture(emoji: string, size: number) { + const gl = this.#gl; + + // Create offscreen canvas + const canvas = document.createElement("canvas"); + canvas.width = size; + canvas.height = size; + const ctx = canvas.getContext("2d"); + if (!ctx) return; + + // Render emoji centered + ctx.textAlign = "center"; + ctx.textBaseline = "middle"; + ctx.font = `${size * 0.5}px "Apple Color Emoji", "Segoe UI Emoji", "Noto Color Emoji", sans-serif`; + // Shift down slightly to compensate for emoji baseline issues + ctx.fillText(emoji, size / 2, size * 0.56); + + // Upload to texture + gl.bindTexture(gl.TEXTURE_2D, this.memeTexture); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); + gl.bindTexture(gl.TEXTURE_2D, null); + + // Set meme size for bounds calculation + this.#memeSize.set(Vector.create(size, size)); + } + + #runRenderSize(effect: Effect) { + const scale = effect.get(Settings.render.scale); + const target = effect.get(this.broadcast.bounds).size; + const size = Math.min(target.x, target.y) * scale; + // Increase to the nearest power of 2 + const power = Math.ceil(Math.log2(size)); + this.#renderSize.set(2 ** power); + } + + // Update opacity values based on current time (called once per frame) + tick(now: DOMHighResTimeStamp) { + const TRANSITION_DURATION = 300; // ms + + // Calculate frame opacity + const frameElapsed = now - this.#frameTransition; + if (this.frameActive) { + this.frameOpacity = Math.min(1, Math.max(0, frameElapsed / TRANSITION_DURATION)); } else { - this.online += (0 - this.online) * 0.1; + this.frameOpacity = Math.max(0, 1 - frameElapsed / TRANSITION_DURATION); } - /* - const ZOOM_SPEED = 0.005; - this.#zoom = this.#zoom.lerp(this.#zoomTarget, ZOOM_SPEED); - */ + // Calculate meme opacity + const memeElapsed = now - this.#memeTransition; + if (this.memeActive.peek()) { + this.memeOpacity = Math.min(1, Math.max(0, memeElapsed / TRANSITION_DURATION)); + } else { + this.memeOpacity = Math.max(0, 1 - memeElapsed / TRANSITION_DURATION); + } } close() { diff --git a/app/src/settings.tsx b/app/src/settings.tsx index 8f19a3f7..54794982 100644 --- a/app/src/settings.tsx +++ b/app/src/settings.tsx @@ -73,10 +73,10 @@ export const Settings = { }, // Rendering settings - rendering: { - devicePixelRatio: new Signal( + render: { + scale: new Signal( (() => { - const stored = localStorage.getItem("settings.rendering.devicePixelRatio"); + const stored = localStorage.getItem("settings.render.scale"); if (stored) { const parsed = Number.parseFloat(stored); if (!Number.isNaN(parsed) && parsed > 0 && parsed <= window.devicePixelRatio) { @@ -206,8 +206,8 @@ effect.subscribe(Settings.tutorial.step, (step) => { localStorage.setItem("settings.tutorial.step", step.toString()); }); -effect.subscribe(Settings.rendering.devicePixelRatio, (ratio) => { - localStorage.setItem("settings.rendering.devicePixelRatio", ratio.toString()); +effect.subscribe(Settings.render.scale, (ratio) => { + localStorage.setItem("settings.render.scale", ratio.toString()); }); // Mostly just to avoid console warnings about signals not being closed @@ -221,7 +221,7 @@ export function Modal(props: { sound: Sound }): JSX.Element { const draggable = solid(Settings.draggable); const tts = 
createSelector(solid(Settings.audio.tts)); const webGPUSupported = supportsWebGPU(); - const devicePixelRatio = solid(Settings.rendering.devicePixelRatio); + const devicePixelRatio = solid(Settings.render.scale); const maxDevicePixelRatio = window.devicePixelRatio; // Calculate available pixel ratio options (0.5x, 1x, 2x, 4x, 8x) @@ -385,7 +385,7 @@ export function Modal(props: { sound: Sound }): JSX.Element { "bg-blue-500 text-white shadow-sm": isSelectedRatio(ratio), "text-white/60 hover:text-white/80 hover:bg-white/5": !isSelectedRatio(ratio), }} - onClick={() => Settings.rendering.devicePixelRatio.set(ratio)} + onClick={() => Settings.render.scale.set(ratio)} > {ratio}x From d3391b8fede4dfde714ddbd27bee856e4eb27dc0 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 13:15:46 -0700 Subject: [PATCH 16/19] misc fixes --- app/src/room/fake.ts | 10 ++++++++-- app/src/room/gl/shaders/border.frag | 2 +- app/src/room/gl/shaders/border.vert | 1 + app/src/room/name.ts | 2 +- app/src/room/video.ts | 5 ++++- 5 files changed, 15 insertions(+), 5 deletions(-) diff --git a/app/src/room/fake.ts b/app/src/room/fake.ts index 9e6faa7e..7f45e30f 100644 --- a/app/src/room/fake.ts +++ b/app/src/room/fake.ts @@ -118,10 +118,16 @@ export class FakeBroadcast { const onFrame = () => { if (!video.paused && !video.ended) { - this.video.frame.set(new VideoFrame(video)); + this.video.frame.update((prev) => { + prev?.close(); + return new VideoFrame(video); + }); video.requestVideoFrameCallback(onFrame); } else { - this.video.frame.set(undefined); + this.video.frame.update((prev) => { + prev?.close(); + return undefined; + }); } }; diff --git a/app/src/room/gl/shaders/border.frag b/app/src/room/gl/shaders/border.frag index c64250f2..0ec03941 100644 --- a/app/src/room/gl/shaders/border.frag +++ b/app/src/room/gl/shaders/border.frag @@ -28,7 +28,7 @@ void main() { float outerDist = roundedBoxSDF(center, u_size * 0.5, u_radius); // Discard anything outside the outer bounds - if (outerDist > 0.0) { + if (outerDist > 1.0) { discard; } diff --git a/app/src/room/gl/shaders/border.vert b/app/src/room/gl/shaders/border.vert index 0286f10e..1f29aa27 100644 --- a/app/src/room/gl/shaders/border.vert +++ b/app/src/room/gl/shaders/border.vert @@ -1,4 +1,5 @@ #version 300 es +precision highp float; in vec2 a_position; diff --git a/app/src/room/name.ts b/app/src/room/name.ts index 32edd977..45a86b02 100644 --- a/app/src/room/name.ts +++ b/app/src/room/name.ts @@ -59,7 +59,7 @@ export class Name { root.style.left = `${left}px`; root.style.top = `${top}px`; root.style.fontSize = `${fontSize}px`; - root.style.maxWidth = `${pageBounds.width - 2 * offset}px`; + root.style.maxWidth = `${Math.max(0, pageBounds.width - 2 * offset)}px`; }; // Update name text diff --git a/app/src/room/video.ts b/app/src/room/video.ts index 1e0ed056..09c1c3ee 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -79,6 +79,9 @@ export class Video { // TODO only set the avatar if it successfully loads const newAvatar = new Image(); + // Enable CORS for external avatar images + newAvatar.crossOrigin = "anonymous"; + // For SVGs, load at higher resolution to avoid pixelation // Set a reasonable size (e.g., 512x512) for better quality if (avatar.endsWith(".svg")) { @@ -310,7 +313,7 @@ export class Video { canvas.width = size; canvas.height = size; const ctx = canvas.getContext("2d"); - if (!ctx) return; + if (!ctx) throw new Error("Failed to get context"); // Render emoji centered ctx.textAlign = "center"; From 
a21f194c0288873d9ac70ef4b3c1fecb9f008dcd Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 14:17:35 -0700 Subject: [PATCH 17/19] demo/audio prompt were missing. --- app/src/about.tsx | 59 +++++++++++-------- app/src/components/audio-prompt.tsx | 21 +++++++ app/src/components/badge.ts | 16 +++-- app/src/components/demo-header.tsx | 11 ++++ app/src/components/profile.tsx | 2 +- app/src/room/canvas.ts | 33 +++-------- app/src/room/gl/common.ts | 27 --------- app/src/room/gl/context.ts | 3 - app/src/room/space.ts | 10 ---- app/src/room/video.ts | 12 ++++ app/src/settings.tsx | 2 +- app/src/tauri/update.ts | 1 + justfile | 12 ++-- .../gen/apple/hang.xcodeproj/project.pbxproj | 26 ++++---- native/gen/apple/hang_iOS/Info.plist | 2 + native/justfile | 8 +-- native/src/lib.rs | 3 +- 17 files changed, 125 insertions(+), 123 deletions(-) create mode 100644 app/src/components/audio-prompt.tsx create mode 100644 app/src/components/demo-header.tsx delete mode 100644 app/src/room/gl/common.ts diff --git a/app/src/about.tsx b/app/src/about.tsx index 0d16b459..e349a31b 100644 --- a/app/src/about.tsx +++ b/app/src/about.tsx @@ -1,5 +1,8 @@ +import solid from "@kixelated/signals/solid"; import { createEffect, createSignal, type JSX, onCleanup } from "solid-js"; +import AudioPrompt from "./components/audio-prompt"; import CreateHang from "./components/create"; +import DemoHeader from "./components/demo-header"; import Layout from "./layout/web"; import { Canvas } from "./room/canvas"; import { FakeRoom } from "./room/fake"; @@ -7,9 +10,15 @@ import { FakeRoom } from "./room/fake"; export function About(): JSX.Element { const canvas = ; - const room = new FakeRoom(new Canvas(canvas as HTMLCanvasElement, { demo: true })); + const room = new FakeRoom(new Canvas(canvas as HTMLCanvasElement)); onCleanup(() => room.close()); + const audioSuspended = solid(room.sound.suspended); + + const handleEnableAudio = () => { + room.sound.enabled.set(true); + }; + const services = ["Meet", "Zoom", "Teams", "Discord", "Skype", "WebEx", "FaceTime", "WhatsApp"]; const [currentService, setCurrentService] = createSignal(0); @@ -73,7 +82,7 @@ export function About(): JSX.Element { () => two.chat.message.latest.set("yo"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("how's life as a simulation?"), - () => {}, + () => { }, () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("okay I guess"), () => two.location.window.position.update((prev) => ({ ...prev, s: 1.5 })), @@ -103,10 +112,10 @@ export function About(): JSX.Element { }, () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("hello"), - () => {}, + () => { }, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("try turning your webcam on"), - () => {}, + () => { }, () => { two.user.name.set("omni-chan"); two.user.avatar.set("/avatar/omni.jpg"); @@ -120,21 +129,21 @@ export function About(): JSX.Element { () => three.location.window.position.set(randomLocation()), () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("omg"), - () => {}, - () => {}, + () => { }, + () => { }, () => two.stop(), () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("on second thought, maybe not"), - () => {}, + () => { }, () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("gotta run"), () => room.remove("3"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("lame"), - () => {}, + () => { }, () 
=> one.chat.typing.active.set(true), () => one.chat.message.latest.set("try typing /huh"), - () => {}, + () => { }, () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("/huh"), () => two.location.window.position.set({ x: -0.5, y: 0 }), @@ -145,7 +154,7 @@ export function About(): JSX.Element { () => one.chat.message.latest.set("and audio panning"), () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("huh?"), - () => {}, + () => { }, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("inviting the squad"), () => { @@ -162,12 +171,12 @@ export function About(): JSX.Element { () => five.chat.message.latest.set("yo"), () => six.chat.typing.active.set(true), () => six.chat.message.latest.set("poop"), - () => {}, - () => {}, + () => { }, + () => { }, () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("good lord"), - () => {}, - () => {}, + () => { }, + () => { }, () => six.chat.typing.active.set(true), () => six.chat.message.latest.set("let's watch something"), () => { @@ -186,12 +195,12 @@ export function About(): JSX.Element { two.chat.message.latest.set("this shit again"); }, () => two.location.window.position.set(randomLeft()), - () => {}, - () => {}, - () => {}, - () => {}, - () => {}, - () => {}, + () => { }, + () => { }, + () => { }, + () => { }, + () => { }, + () => { }, () => room.remove("7"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("okay, that's enough of a demo"), @@ -200,7 +209,7 @@ export function About(): JSX.Element { () => room.remove("6"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("there's a lot more, but it's hard to demo"), - () => {}, + () => { }, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("[start a hang](https://hang.live/home)"), () => two.audio.captions.text.set("like automatic captions"), @@ -210,7 +219,7 @@ export function About(): JSX.Element { () => room.remove("2"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("enjoy"), - () => {}, + () => { }, () => room.remove("1"), ]; @@ -251,7 +260,11 @@ export function About(): JSX.Element {
-				{canvas}
+				<div>
+					<DemoHeader />
+					<AudioPrompt show={audioSuspended()} onClick={handleEnableAudio} />
+					{canvas}
+				</div>
Powered by new and open source web tech:{" "} diff --git a/app/src/components/audio-prompt.tsx b/app/src/components/audio-prompt.tsx new file mode 100644 index 00000000..a8c97bc1 --- /dev/null +++ b/app/src/components/audio-prompt.tsx @@ -0,0 +1,21 @@ +import { Show } from "solid-js"; +import type { JSX } from "solid-js/jsx-runtime"; + +export default function AudioPrompt(props: { show: boolean; onClick: () => void }): JSX.Element { + return ( + +
+		<Show when={props.show}>
+			<button type="button" onClick={props.onClick}>
+				🔊 Click to enable audio
+			</button>
+		</Show>
+ + ); +} diff --git a/app/src/components/badge.ts b/app/src/components/badge.ts index 10a28478..89b04f89 100644 --- a/app/src/components/badge.ts +++ b/app/src/components/badge.ts @@ -4,14 +4,20 @@ import * as Tauri from "../tauri"; async function set(count: number | undefined) { if (Tauri.Api) { - await Tauri.Api.window + const success = await Tauri.Api.window .getCurrentWindow() .setBadgeCount(count || undefined) - .catch((error) => console.warn("Failed to set Tauri badge:", error)); - } else if (navigator.setAppBadge) { - await navigator + .then(() => true) + .catch(() => false); + if (success) return; + } + + if (navigator.setAppBadge) { + const success = await navigator .setAppBadge(count || undefined) - .catch((error) => console.warn("Failed to set Web badge:", error)); + .then(() => true) + .catch(() => false); + if (success) return; } } diff --git a/app/src/components/demo-header.tsx b/app/src/components/demo-header.tsx new file mode 100644 index 00000000..6105d633 --- /dev/null +++ b/app/src/components/demo-header.tsx @@ -0,0 +1,11 @@ +import type { JSX } from "solid-js/jsx-runtime"; + +export default function DemoHeader(): JSX.Element { + return ( +
+		<div>
+			DEMO
+		</div>
+ ); +} diff --git a/app/src/components/profile.tsx b/app/src/components/profile.tsx index bbdae37b..dd0c2dea 100644 --- a/app/src/components/profile.tsx +++ b/app/src/components/profile.tsx @@ -124,7 +124,7 @@ class LocalPreview { constructor(element: HTMLCanvasElement, camera: Publish.Broadcast) { // Create a minimal canvas without the background effects - this.canvas = new Canvas(element, { demo: false }); + this.canvas = new Canvas(element); // Create a minimal sound context (muted for preview) this.sound = new Sound(); diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts index 1b446d70..2454f651 100644 --- a/app/src/room/canvas.ts +++ b/app/src/room/canvas.ts @@ -5,10 +5,6 @@ import { BackgroundRenderer } from "./gl/background"; import { Camera } from "./gl/camera"; import { GLContext } from "./gl/context"; -export type CanvasProps = { - demo?: boolean; -}; - export class Canvas { #canvas: HTMLCanvasElement; #glContext: GLContext; @@ -20,7 +16,6 @@ export class Canvas { visible: Signal; viewport: Signal; - demo: Signal; #signals = new Effect(); @@ -40,10 +35,9 @@ export class Canvas { return this.#camera; } - constructor(element: HTMLCanvasElement, props?: CanvasProps) { + constructor(element: HTMLCanvasElement) { this.#canvas = element; - this.demo = new Signal(props?.demo ?? false); this.visible = new Signal(false); this.viewport = new Signal(Vector.create(0, 0)); @@ -121,7 +115,11 @@ export class Canvas { let cancel: number; const render = (now: DOMHighResTimeStamp) => { - this.#render(now); + try { + this.#render(now); + } catch (err) { + console.error("render error", err); + } cancel = requestAnimationFrame(render); }; @@ -132,35 +130,18 @@ export class Canvas { } #render(now: DOMHighResTimeStamp) { - // Update common uniforms for this frame - this.#glContext.uniforms.update(now); - // Clear the screen this.#glContext.clear(); // Render background with shader this.#backgroundRenderer.render(now); - // TODO: Render demo text if enabled - // if (this.demo.peek()) { - // this.#renderDemo(now); - // } - // Render broadcasts if (this.onRender) { - try { - this.onRender(now); - } catch (err) { - console.error("render error", err); - } + this.onRender(now); } } - // TODO: Implement demo text rendering with WebGL - // #renderDemo(now: DOMHighResTimeStamp) { - // // Render "DEMO" text at various positions - // } - toggleFullscreen() { if (document.fullscreenElement) { document.exitFullscreen(); diff --git a/app/src/room/gl/common.ts b/app/src/room/gl/common.ts deleted file mode 100644 index f02d69e9..00000000 --- a/app/src/room/gl/common.ts +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Manages common uniform values shared across multiple shaders. - * Computes values like time once per frame. - */ -export class CommonUniforms { - #startTime: number; - #currentTime: number = 0; - - constructor() { - this.#startTime = performance.now(); - } - - /** - * Update computed values for the current frame. - * Call this once per frame before rendering. - */ - update(now: DOMHighResTimeStamp) { - this.#currentTime = (now - this.#startTime) / 1000; - } - - /** - * Get time value in seconds since creation. 
- */ - get time(): number { - return this.#currentTime; - } -} diff --git a/app/src/room/gl/context.ts b/app/src/room/gl/context.ts index 7b765484..ff5e3ee5 100644 --- a/app/src/room/gl/context.ts +++ b/app/src/room/gl/context.ts @@ -1,12 +1,10 @@ import { Signal } from "@kixelated/signals"; import { Vector } from "../geometry"; -import { CommonUniforms } from "./common"; export class GLContext { gl: WebGL2RenderingContext; canvas: HTMLCanvasElement; viewport: Signal; - uniforms: CommonUniforms; constructor(canvas: HTMLCanvasElement, viewport: Signal) { const gl = canvas.getContext("webgl2", { @@ -23,7 +21,6 @@ export class GLContext { this.gl = gl; this.canvas = canvas; this.viewport = viewport; - this.uniforms = new CommonUniforms(); // Enable depth testing for z-index ordering gl.enable(gl.DEPTH_TEST); diff --git a/app/src/room/space.ts b/app/src/room/space.ts index c35555df..844078da 100644 --- a/app/src/room/space.ts +++ b/app/src/room/space.ts @@ -596,11 +596,6 @@ export class Space { // Render using WebGL #render(now: DOMHighResTimeStamp) { - // TODO: Render the audio click prompt if audio is suspended - // if (this.sound.suspended.peek() && !this.profile) { - // this.#renderAudioPrompt(); - // } - const broadcasts = this.ordered.peek(); // Render in order: black borders (back) -> audio viz (middle) -> videos (front) @@ -644,11 +639,6 @@ export class Space { } } - // TODO: Implement audio prompt with WebGL or DOM overlay - // #renderAudioPrompt() { - // // "🔊 Click to enable audio" - // } - #runScale(effect: Effect) { const broadcasts = effect.get(this.ordered); if (broadcasts.length === 0) { diff --git a/app/src/room/video.ts b/app/src/room/video.ts index 09c1c3ee..4bbe5374 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -55,6 +55,18 @@ export class Video { this.avatarTexture = this.#gl.createTexture(); this.memeTexture = this.#gl.createTexture(); + // Initialize textures with 1x1 transparent pixel to make them renderable + const emptyPixel = new Uint8Array([0, 0, 0, 0]); + for (const texture of [this.frameTexture, this.avatarTexture, this.memeTexture]) { + this.#gl.bindTexture(this.#gl.TEXTURE_2D, texture); + this.#gl.texImage2D(this.#gl.TEXTURE_2D, 0, this.#gl.RGBA, 1, 1, 0, this.#gl.RGBA, this.#gl.UNSIGNED_BYTE, emptyPixel); + this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_WRAP_S, this.#gl.CLAMP_TO_EDGE); + this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_WRAP_T, this.#gl.CLAMP_TO_EDGE); + this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_MIN_FILTER, this.#gl.LINEAR); + this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_MAG_FILTER, this.#gl.LINEAR); + } + this.#gl.bindTexture(this.#gl.TEXTURE_2D, null); + // Set up texture upload effects this.broadcast.signals.effect(this.#runFrame.bind(this)); this.broadcast.signals.effect(this.#runMeme.bind(this)); diff --git a/app/src/settings.tsx b/app/src/settings.tsx index 54794982..ec242a43 100644 --- a/app/src/settings.tsx +++ b/app/src/settings.tsx @@ -83,7 +83,7 @@ export const Settings = { return parsed; } } - return Math.max(1, window.devicePixelRatio / 2); + return window.devicePixelRatio; })(), ), }, diff --git a/app/src/tauri/update.ts b/app/src/tauri/update.ts index 912dccf0..a76fad8b 100644 --- a/app/src/tauri/update.ts +++ b/app/src/tauri/update.ts @@ -4,6 +4,7 @@ import * as Updater from "@tauri-apps/plugin-updater"; // VERY important that this doesn't throw an error async function check() { try { + console.log("checking for update"); const update = await 
Updater.check({ allowDowngrades: true }); if (!update) return; diff --git a/justfile b/justfile index 7f18cecf..15d7ce39 100644 --- a/justfile +++ b/justfile @@ -53,10 +53,10 @@ dev: native: cd native && just dev -# Open the Android Studio project -android: - cd native && just android +# Run the Android build, using --open to open Android Studio +android *args: + cd native && just android {{args}} -# Open the Xcode project -ios: - cd native && just ios +# Run the iOS build, using --open to open Xcode +ios *args: + cd native && just ios {{args}} diff --git a/native/gen/apple/hang.xcodeproj/project.pbxproj b/native/gen/apple/hang.xcodeproj/project.pbxproj index a3990e36..6590e642 100644 --- a/native/gen/apple/hang.xcodeproj/project.pbxproj +++ b/native/gen/apple/hang.xcodeproj/project.pbxproj @@ -232,8 +232,8 @@ "$(SRCROOT)/Externals/arm64/${CONFIGURATION}/libapp.a", ); runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "bun tauri ios xcode-script -v --platform ${PLATFORM_DISPLAY_NAME:?} --sdk-root ${SDKROOT:?} --framework-search-paths \"${FRAMEWORK_SEARCH_PATHS:?}\" --header-search-paths \"${HEADER_SEARCH_PATHS:?}\" --gcc-preprocessor-definitions \"${GCC_PREPROCESSOR_DEFINITIONS:-}\" --configuration ${CONFIGURATION:?} ${FORCE_COLOR} ${ARCHS:?}\n"; + shellPath = /bin/zsh; + shellScript = "export PATH=\"$HOME/.bun/bin:$PATH\"\neval \"$(/opt/homebrew/bin/brew shellenv)\"\nbun tauri ios xcode-script -v --platform ${PLATFORM_DISPLAY_NAME:?} --sdk-root ${SDKROOT:?} --framework-search-paths \"${FRAMEWORK_SEARCH_PATHS:?}\" --header-search-paths \"${HEADER_SEARCH_PATHS:?}\" --gcc-preprocessor-definitions \"${GCC_PREPROCESSOR_DEFINITIONS:-}\" --configuration ${CONFIGURATION:?} ${FORCE_COLOR} ${ARCHS:?}\n"; }; /* End PBXShellScriptBuildPhase section */ @@ -313,8 +313,10 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_ENTITLEMENTS = hang_iOS/hang_iOS.entitlements; CODE_SIGN_IDENTITY = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; CODE_SIGN_STYLE = Manual; - "DEVELOPMENT_TEAM[sdk=iphoneos*]" = "D7D5SDDB5Z"; + DEVELOPMENT_TEAM = D7D5SDDB5Z; + "DEVELOPMENT_TEAM[sdk=iphoneos*]" = D7D5SDDB5Z; ENABLE_BITCODE = NO; "EXCLUDED_ARCHS[sdk=iphoneos*]" = x86_64; FRAMEWORK_SEARCH_PATHS = ( @@ -342,18 +344,13 @@ ); PRODUCT_BUNDLE_IDENTIFIER = live.hang; PRODUCT_NAME = "hang"; + PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; "PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" = "3a83435e-25c5-4801-ba3d-e7f438c554da"; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALID_ARCHS = arm64; - DEVELOPMENT_TEAM = "D7D5SDDB5Z"; - "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; - PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; }; - PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; - "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; name = release; - PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; }; A981A291F13CD61156C96A03 /* debug */ = { isa = XCBuildConfiguration; @@ -426,8 +423,10 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_ENTITLEMENTS = hang_iOS/hang_iOS.entitlements; CODE_SIGN_IDENTITY = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; CODE_SIGN_STYLE = Manual; - "DEVELOPMENT_TEAM[sdk=iphoneos*]" = "D7D5SDDB5Z"; + 
DEVELOPMENT_TEAM = D7D5SDDB5Z; + "DEVELOPMENT_TEAM[sdk=iphoneos*]" = D7D5SDDB5Z; ENABLE_BITCODE = NO; "EXCLUDED_ARCHS[sdk=iphoneos*]" = x86_64; FRAMEWORK_SEARCH_PATHS = ( @@ -455,18 +454,13 @@ ); PRODUCT_BUNDLE_IDENTIFIER = live.hang; PRODUCT_NAME = "hang"; + PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; "PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" = "3a83435e-25c5-4801-ba3d-e7f438c554da"; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALID_ARCHS = arm64; - DEVELOPMENT_TEAM = "D7D5SDDB5Z"; - "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; - PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; }; - PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; - "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)"; name = debug; - PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da"; }; /* End XCBuildConfiguration section */ diff --git a/native/gen/apple/hang_iOS/Info.plist b/native/gen/apple/hang_iOS/Info.plist index 93009027..547a368b 100644 --- a/native/gen/apple/hang_iOS/Info.plist +++ b/native/gen/apple/hang_iOS/Info.plist @@ -44,5 +44,7 @@ Request camera access NSMicrophoneUsageDescription Request microphone access + ITSAppUsesNonExemptEncryption + \ No newline at end of file diff --git a/native/justfile b/native/justfile index c7c37f18..3996f6fa 100644 --- a/native/justfile +++ b/native/justfile @@ -26,11 +26,11 @@ deploy platform: dev: bun tauri dev -android: - bun tauri android dev --host --open +android *args: + bun tauri android dev --host {{args}} -ios: - bun tauri ios dev --host +ios *args: + bun tauri ios dev --host {{args}} # Generate Tauri icons from browser-generated ZIP files # Prerequisites: Download all ZIP files from http://localhost:1420/dev/icons to this directory diff --git a/native/src/lib.rs b/native/src/lib.rs index 428cce59..db426ace 100644 --- a/native/src/lib.rs +++ b/native/src/lib.rs @@ -1,4 +1,4 @@ -use tauri::{Emitter, Manager}; +use tauri::Emitter; #[cfg_attr(mobile, tauri::mobile_entry_point)] pub fn run() { @@ -57,6 +57,7 @@ async fn start_server(window: tauri::Window) -> Result { // Focus the main window #[cfg(desktop)] { + use tauri::Manager; let _ = window.get_webview_window("main").expect("no main window").set_focus(); } }) From c4b9dd9460dd3e77d09579743d60e7612d8ff1bc Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 14:35:40 -0700 Subject: [PATCH 18/19] Fix the fake. --- app/src/about.tsx | 8 ++----- app/src/room/chat.ts | 7 ++++++ app/src/room/fake.ts | 51 ++++++++++++++++++++++++++++++++------------ app/src/room/name.ts | 19 ++++++++++++++--- 4 files changed, 62 insertions(+), 23 deletions(-) diff --git a/app/src/about.tsx b/app/src/about.tsx index e349a31b..d682bdcf 100644 --- a/app/src/about.tsx +++ b/app/src/about.tsx @@ -118,11 +118,11 @@ export function About(): JSX.Element { () => { }, () => { two.user.name.set("omni-chan"); - two.user.avatar.set("/avatar/omni.jpg"); + two.show(new URL("/avatar/omni.jpg", import.meta.url)); }, () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("oops wrong button"), - () => two.user.avatar.set("/avatar/43.svg"), + () => two.stop(), () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("dude"), () => two.play(new URL("/meme/linus.mp4", import.meta.url)), @@ -271,10 +271,6 @@ export function About(): JSX.Element { MoQ. There's more to live than another {services[currentService()]}{" "} clone. 
Crazy, I know.

- -
- we are live -
); diff --git a/app/src/room/chat.ts b/app/src/room/chat.ts index 53f19c41..60a9d757 100644 --- a/app/src/room/chat.ts +++ b/app/src/room/chat.ts @@ -124,6 +124,13 @@ export class Chat { updatePosition(bounds, viewport); }); + // Update position when window scrolls + effect.event(window, "scroll", () => { + const bounds = this.broadcast.bounds.peek(); + const viewport = this.broadcast.canvas.viewport.peek(); + updatePosition(bounds, viewport); + }, { passive: true }); + effect.effect((effect) => { const typing = effect.get(this.broadcast.source.chat.typing.active); DOM.setClass(effect, icon, typing ? "icon-[mdi--chat-typing]" : "icon-[mdi--chat]"); diff --git a/app/src/room/fake.ts b/app/src/room/fake.ts index 7f45e30f..96ce5447 100644 --- a/app/src/room/fake.ts +++ b/app/src/room/fake.ts @@ -97,6 +97,7 @@ export class FakeBroadcast { }); } + // Plays a video file. play(src: URL) { const video = document.createElement("video"); video.src = src.toString(); @@ -117,18 +118,11 @@ export class FakeBroadcast { this.#video = video; const onFrame = () => { - if (!video.paused && !video.ended) { - this.video.frame.update((prev) => { - prev?.close(); - return new VideoFrame(video); - }); - video.requestVideoFrameCallback(onFrame); - } else { - this.video.frame.update((prev) => { - prev?.close(); - return undefined; - }); - } + this.video.frame.update((prev) => { + prev?.close(); + return new VideoFrame(video); + }); + video.requestVideoFrameCallback(onFrame); }; video.requestVideoFrameCallback(onFrame); @@ -147,18 +141,47 @@ export class FakeBroadcast { ]); }; + video.onended = () => this.stop(); + const source = new MediaElementAudioSourceNode(this.sound.context, { mediaElement: video }); this.audio.root.set(source); } + // "plays" an image file. + show(src: URL) { + const image = new Image(); + image.src = src.toString(); + image.onload = () => { + this.video.frame.update((prev) => { + prev?.close(); + return new VideoFrame(image, { timestamp: 0 }); + }); + + this.video.catalog.set([ + { + track: "image", + config: { + codec: "fake", + displayAspectWidth: u53(image.width), + displayAspectHeight: u53(image.height), + }, + }, + ]); + }; + } + stop() { - this.#video?.pause(); - this.#video = undefined; + this.video.frame.update((prev) => { + prev?.close(); + return undefined; + }); this.audio.root.update((prev) => { prev?.disconnect(); return undefined; }); + + this.video.catalog.set(undefined); } close() { diff --git a/app/src/room/name.ts b/app/src/room/name.ts index 45a86b02..7b066ad7 100644 --- a/app/src/room/name.ts +++ b/app/src/room/name.ts @@ -53,13 +53,19 @@ export class Name { // Position name at top-left of broadcast with offset const fontSize = 12; const offset = 12; - const left = Math.round(pageBounds.x + offset); - const top = Math.round(pageBounds.y + offset); + + // Clamp position to stay within canvas bounds + const left = Math.round(Math.max(canvasRect.left + offset, Math.min(pageBounds.x + offset, canvasRect.right - offset))); + const top = Math.round(Math.max(canvasRect.top + offset, Math.min(pageBounds.y + offset, canvasRect.bottom - fontSize - offset))); root.style.left = `${left}px`; root.style.top = `${top}px`; root.style.fontSize = `${fontSize}px`; - root.style.maxWidth = `${Math.max(0, pageBounds.width - 2 * offset)}px`; + + // Max width should be constrained by both broadcast width and canvas bounds + const maxWidthFromBroadcast = Math.max(0, pageBounds.width - 2 * offset); + const maxWidthFromCanvas = Math.max(0, canvasRect.right - left - offset); + root.style.maxWidth = 
`${Math.min(maxWidthFromBroadcast, maxWidthFromCanvas)}px`; }; // Update name text @@ -75,6 +81,13 @@ export class Name { updatePosition(bounds, viewport); }); + // Update position when window scrolls + effect.event(window, "scroll", () => { + const bounds = this.broadcast.bounds.peek(); + const viewport = this.broadcast.canvas.viewport.peek(); + updatePosition(bounds, viewport); + }, { passive: true }); + // Update z-index based on broadcast position effect.effect((effect) => { const z = effect.get(this.broadcast.position).z; From 42062a7734df7bdb849dcd112932e4003ff5510b Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Tue, 7 Oct 2025 16:26:18 -0700 Subject: [PATCH 19/19] Flip --- app/src/about.tsx | 42 ++++++++--------- app/src/components/demo-header.tsx | 4 +- app/src/room/chat.ts | 15 ++++--- app/src/room/fake.ts | 4 -- app/src/room/gl/broadcast.ts | 5 +++ app/src/room/gl/shaders/broadcast.frag | 6 ++- app/src/room/name.ts | 26 ++++++++--- app/src/room/video.ts | 62 +++++++++++++++++++++----- app/src/settings.tsx | 2 +- moq | 2 +- 10 files changed, 114 insertions(+), 54 deletions(-) diff --git a/app/src/about.tsx b/app/src/about.tsx index d682bdcf..cb1afde3 100644 --- a/app/src/about.tsx +++ b/app/src/about.tsx @@ -82,7 +82,7 @@ export function About(): JSX.Element { () => two.chat.message.latest.set("yo"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("how's life as a simulation?"), - () => { }, + () => {}, () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("okay I guess"), () => two.location.window.position.update((prev) => ({ ...prev, s: 1.5 })), @@ -112,10 +112,10 @@ export function About(): JSX.Element { }, () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("hello"), - () => { }, + () => {}, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("try turning your webcam on"), - () => { }, + () => {}, () => { two.user.name.set("omni-chan"); two.show(new URL("/avatar/omni.jpg", import.meta.url)); @@ -129,21 +129,21 @@ export function About(): JSX.Element { () => three.location.window.position.set(randomLocation()), () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("omg"), - () => { }, - () => { }, + () => {}, + () => {}, () => two.stop(), () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("on second thought, maybe not"), - () => { }, + () => {}, () => three.chat.typing.active.set(true), () => three.chat.message.latest.set("gotta run"), () => room.remove("3"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("lame"), - () => { }, + () => {}, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("try typing /huh"), - () => { }, + () => {}, () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("/huh"), () => two.location.window.position.set({ x: -0.5, y: 0 }), @@ -154,7 +154,7 @@ export function About(): JSX.Element { () => one.chat.message.latest.set("and audio panning"), () => two.chat.typing.active.set(true), () => two.chat.message.latest.set("huh?"), - () => { }, + () => {}, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("inviting the squad"), () => { @@ -171,12 +171,12 @@ export function About(): JSX.Element { () => five.chat.message.latest.set("yo"), () => six.chat.typing.active.set(true), () => six.chat.message.latest.set("poop"), - () => { }, - () => { }, + () => {}, + () => {}, () => two.chat.typing.active.set(true), () => 
two.chat.message.latest.set("good lord"), - () => { }, - () => { }, + () => {}, + () => {}, () => six.chat.typing.active.set(true), () => six.chat.message.latest.set("let's watch something"), () => { @@ -195,12 +195,12 @@ export function About(): JSX.Element { two.chat.message.latest.set("this shit again"); }, () => two.location.window.position.set(randomLeft()), - () => { }, - () => { }, - () => { }, - () => { }, - () => { }, - () => { }, + () => {}, + () => {}, + () => {}, + () => {}, + () => {}, + () => {}, () => room.remove("7"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("okay, that's enough of a demo"), @@ -209,7 +209,7 @@ export function About(): JSX.Element { () => room.remove("6"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("there's a lot more, but it's hard to demo"), - () => { }, + () => {}, () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("[start a hang](https://hang.live/home)"), () => two.audio.captions.text.set("like automatic captions"), @@ -219,7 +219,7 @@ export function About(): JSX.Element { () => room.remove("2"), () => one.chat.typing.active.set(true), () => one.chat.message.latest.set("enjoy"), - () => { }, + () => {}, () => room.remove("1"), ]; diff --git a/app/src/components/demo-header.tsx b/app/src/components/demo-header.tsx index 6105d633..0d620ea6 100644 --- a/app/src/components/demo-header.tsx +++ b/app/src/components/demo-header.tsx @@ -3,9 +3,7 @@ import type { JSX } from "solid-js/jsx-runtime"; export default function DemoHeader(): JSX.Element { return (
- +
); } diff --git a/app/src/room/chat.ts b/app/src/room/chat.ts index 60a9d757..f7416274 100644 --- a/app/src/room/chat.ts +++ b/app/src/room/chat.ts @@ -125,11 +125,16 @@ export class Chat { }); // Update position when window scrolls - effect.event(window, "scroll", () => { - const bounds = this.broadcast.bounds.peek(); - const viewport = this.broadcast.canvas.viewport.peek(); - updatePosition(bounds, viewport); - }, { passive: true }); + effect.event( + window, + "scroll", + () => { + const bounds = this.broadcast.bounds.peek(); + const viewport = this.broadcast.canvas.viewport.peek(); + updatePosition(bounds, viewport); + }, + { passive: true }, + ); effect.effect((effect) => { const typing = effect.get(this.broadcast.source.chat.typing.active); diff --git a/app/src/room/fake.ts b/app/src/room/fake.ts index 96ce5447..c18e425e 100644 --- a/app/src/room/fake.ts +++ b/app/src/room/fake.ts @@ -63,8 +63,6 @@ export class FakeBroadcast { signals = new Effect(); - #video: HTMLVideoElement | undefined; - constructor(sound: Sound, props?: FakeBroadcastProps) { this.sound = sound; @@ -115,8 +113,6 @@ export class FakeBroadcast { video.load(); video.play(); - this.#video = video; - const onFrame = () => { this.video.frame.update((prev) => { prev?.close(); diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts index 36414cf1..0310e632 100644 --- a/app/src/room/gl/broadcast.ts +++ b/app/src/room/gl/broadcast.ts @@ -27,6 +27,7 @@ export class BroadcastRenderer { #u_memeTexture: Uniform1i; #u_memeOpacity: Uniform1f; #u_memeBounds: Uniform4f; + #u_flip: Uniform1i; // Typed attributes #a_position: Attribute; @@ -50,6 +51,7 @@ export class BroadcastRenderer { this.#u_memeTexture = this.#program.createUniform1i("u_memeTexture"); this.#u_memeOpacity = this.#program.createUniform1f("u_memeOpacity"); this.#u_memeBounds = this.#program.createUniform4f("u_memeBounds"); + this.#u_flip = this.#program.createUniform1i("u_flip"); // Initialize typed attributes this.#a_position = this.#program.createAttribute("a_position"); @@ -169,6 +171,9 @@ export class BroadcastRenderer { this.#u_frameOpacity.set(broadcast.video.frameOpacity); this.#u_memeOpacity.set(broadcast.video.memeOpacity); + // Set flip flag + this.#u_flip.set(broadcast.video.flip.peek() ? 1 : 0); + gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, broadcast.video.frameTexture); this.#u_frameTexture.set(0); diff --git a/app/src/room/gl/shaders/broadcast.frag b/app/src/room/gl/shaders/broadcast.frag index eb608a45..eadbe3e2 100644 --- a/app/src/room/gl/shaders/broadcast.frag +++ b/app/src/room/gl/shaders/broadcast.frag @@ -8,6 +8,7 @@ uniform sampler2D u_frameTexture; uniform sampler2D u_avatarTexture; uniform sampler2D u_memeTexture; uniform bool u_avatarActive; +uniform bool u_flip; // Whether to flip the frame texture horizontally uniform float u_radius; uniform vec2 u_size; uniform float u_opacity; @@ -38,8 +39,11 @@ void main() { // Smooth edge antialiasing float alpha = 1.0 - smoothstep(-1.0, 0.0, dist); + // Calculate texture coordinates (flip horizontally if needed for frame) + vec2 frameTexCoord = u_flip ? vec2(1.0 - v_texCoord.x, v_texCoord.y) : v_texCoord; + // Sample textures using pre-computed opacity values - vec4 frameColor = u_frameOpacity > 0.0 ? texture(u_frameTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); + vec4 frameColor = u_frameOpacity > 0.0 ? texture(u_frameTexture, frameTexCoord) : vec4(0.0, 0.0, 0.0, 1.0); vec4 avatarColor = u_avatarActive && u_frameOpacity < 1.0 ? 
texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0); vec4 baseColor = mix(avatarColor, frameColor, u_frameOpacity); diff --git a/app/src/room/name.ts b/app/src/room/name.ts index 7b066ad7..b9e339f0 100644 --- a/app/src/room/name.ts +++ b/app/src/room/name.ts @@ -55,8 +55,15 @@ export class Name { const offset = 12; // Clamp position to stay within canvas bounds - const left = Math.round(Math.max(canvasRect.left + offset, Math.min(pageBounds.x + offset, canvasRect.right - offset))); - const top = Math.round(Math.max(canvasRect.top + offset, Math.min(pageBounds.y + offset, canvasRect.bottom - fontSize - offset))); + const left = Math.round( + Math.max(canvasRect.left + offset, Math.min(pageBounds.x + offset, canvasRect.right - offset)), + ); + const top = Math.round( + Math.max( + canvasRect.top + offset, + Math.min(pageBounds.y + offset, canvasRect.bottom - fontSize - offset), + ), + ); root.style.left = `${left}px`; root.style.top = `${top}px`; @@ -82,11 +89,16 @@ export class Name { }); // Update position when window scrolls - effect.event(window, "scroll", () => { - const bounds = this.broadcast.bounds.peek(); - const viewport = this.broadcast.canvas.viewport.peek(); - updatePosition(bounds, viewport); - }, { passive: true }); + effect.event( + window, + "scroll", + () => { + const bounds = this.broadcast.bounds.peek(); + const viewport = this.broadcast.canvas.viewport.peek(); + updatePosition(bounds, viewport); + }, + { passive: true }, + ); // Update z-index based on broadcast position effect.effect((effect) => { diff --git a/app/src/room/video.ts b/app/src/room/video.ts index 4bbe5374..136444a5 100644 --- a/app/src/room/video.ts +++ b/app/src/room/video.ts @@ -1,3 +1,4 @@ +import { Publish, Watch } from "@kixelated/hang"; import { Effect, Signal } from "@kixelated/signals"; import * as Api from "../api"; import Settings from "../settings"; @@ -45,6 +46,9 @@ export class Video { // Render avatars and emojis at this size #renderSize = new Signal(128); + // Whether to flip the video horizontally (for self-preview) + flip = new Signal(false); + constructor(broadcast: Broadcast) { this.broadcast = broadcast; @@ -59,7 +63,17 @@ export class Video { const emptyPixel = new Uint8Array([0, 0, 0, 0]); for (const texture of [this.frameTexture, this.avatarTexture, this.memeTexture]) { this.#gl.bindTexture(this.#gl.TEXTURE_2D, texture); - this.#gl.texImage2D(this.#gl.TEXTURE_2D, 0, this.#gl.RGBA, 1, 1, 0, this.#gl.RGBA, this.#gl.UNSIGNED_BYTE, emptyPixel); + this.#gl.texImage2D( + this.#gl.TEXTURE_2D, + 0, + this.#gl.RGBA, + 1, + 1, + 0, + this.#gl.RGBA, + this.#gl.UNSIGNED_BYTE, + emptyPixel, + ); this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_WRAP_S, this.#gl.CLAMP_TO_EDGE); this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_WRAP_T, this.#gl.CLAMP_TO_EDGE); this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_MIN_FILTER, this.#gl.LINEAR); @@ -74,10 +88,22 @@ export class Video { this.broadcast.signals.effect(this.#runAvatar.bind(this)); this.broadcast.signals.effect(this.#runTargetSize.bind(this)); this.broadcast.signals.effect(this.#runMemeTransition.bind(this)); + this.broadcast.signals.effect(this.#runFlip.bind(this)); this.broadcast.signals.effect(this.#runRenderSize.bind(this)); } + #runFlip(effect: Effect) { + // Flipping is a mess because there's no way to encode a flipped frame, only to decode it flipped. + if (this.broadcast.source instanceof Publish.Broadcast) { + const flip = effect.get(this.broadcast.source.video.hd.config)?.flip ?? 
false; + this.flip.set(flip); + } else if (this.broadcast.source instanceof Watch.Broadcast) { + const flip = effect.get(this.broadcast.source.video.active)?.config.flip ?? false; + this.flip.set(flip); + } + } + #runAvatar(effect: Effect) { let avatar = effect.get(this.broadcast.source.user.avatar); if (!avatar) { @@ -105,21 +131,35 @@ export class Video { newAvatar.src = avatar; // Once the avatar loads, upload it to the texture - effect.event(newAvatar, "load", this.#uploadAvatar.bind(this, newAvatar)); + effect.event(newAvatar, "load", () => { + const avatarSize = Vector.create( + newAvatar.naturalWidth || newAvatar.width, + newAvatar.naturalHeight || newAvatar.height, + ); + effect.set(this.avatarSize, avatarSize); + + effect.effect((effect) => { + const size = effect.get(this.#renderSize); + this.#imageToTexture(newAvatar, this.avatarTexture, size); + }); + }); } - #uploadAvatar(avatar: HTMLImageElement) { - this.avatar = avatar; - this.avatarSize.set(Vector.create(avatar.naturalWidth || avatar.width, avatar.naturalHeight || avatar.height)); + #imageToTexture(src: HTMLImageElement, dst: WebGLTexture, size: number) { + const canvas = document.createElement("canvas"); + canvas.width = size; + canvas.height = size; + const ctx = canvas.getContext("2d"); + if (!ctx) throw new Error("Failed to get context"); + ctx.drawImage(src, 0, 0, size, size); const gl = this.#gl; - gl.bindTexture(gl.TEXTURE_2D, this.avatarTexture); + gl.bindTexture(gl.TEXTURE_2D, dst); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); - gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, avatar); - gl.generateMipmap(gl.TEXTURE_2D); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas); gl.bindTexture(gl.TEXTURE_2D, null); } @@ -350,10 +390,10 @@ export class Video { #runRenderSize(effect: Effect) { const scale = effect.get(Settings.render.scale); const target = effect.get(this.broadcast.bounds).size; - const size = Math.min(target.x, target.y) * scale; + const size = Math.sqrt(target.x * target.y) * scale; // Increase to the nearest power of 2 const power = Math.ceil(Math.log2(size)); - this.#renderSize.set(2 ** power); + this.#renderSize.set(Math.min(2 ** power, 512 * scale)); } // Update opacity values based on current time (called once per frame) diff --git a/app/src/settings.tsx b/app/src/settings.tsx index ec242a43..19ab0414 100644 --- a/app/src/settings.tsx +++ b/app/src/settings.tsx @@ -374,7 +374,7 @@ export function Modal(props: { sound: Sound }): JSX.Element {
Pixel Ratio - Decrease for better performance + Increase for better quality at the cost of performance.
{pixelRatioOptions.map((ratio) => ( diff --git a/moq b/moq index 10bc4d28..1223704c 160000 --- a/moq +++ b/moq @@ -1 +1 @@ -Subproject commit 10bc4d287d9605a01469ba82b263f60c21352da3 +Subproject commit 1223704cfaff96852c8c58ab466af17f15486745
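
The texture setup in app/src/room/video.ts above seeds each texture with a 1x1 transparent pixel before any real frame arrives. A minimal standalone sketch of that pattern, assuming a WebGL2 context (the helper name is illustrative, not part of the patch):

function createPlaceholderTexture(gl: WebGL2RenderingContext): WebGLTexture {
	const texture = gl.createTexture();
	if (!texture) throw new Error("failed to create texture");

	gl.bindTexture(gl.TEXTURE_2D, texture);

	// A 1x1 upload makes the texture "complete"; sampling an incomplete
	// texture in WebGL2 silently returns opaque black instead.
	const emptyPixel = new Uint8Array([0, 0, 0, 0]);
	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, emptyPixel);

	// Clamp + linear filtering, matching the patch; no mipmaps needed when only level 0 is sampled.
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);

	gl.bindTexture(gl.TEXTURE_2D, null);
	return texture;
}

This is also why the later #imageToTexture helper switches MIN_FILTER from LINEAR_MIPMAP_LINEAR to LINEAR: without a generateMipmap call, a mipmap-based filter would leave the texture incomplete again.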
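
Patch 19 also changes how the avatar/meme render size is derived in #runRenderSize: the geometric mean of the bounds replaces the smaller dimension, and the power-of-two result is capped. A sketch of that heuristic using the constants from the patch (the standalone function is illustrative):

function renderSize(bounds: { x: number; y: number }, scale: number): number {
	// The geometric mean weighs both dimensions instead of only the smaller one.
	const size = Math.sqrt(bounds.x * bounds.y) * scale;

	// Round up to the nearest power of two, then cap to bound texture memory.
	const power = Math.ceil(Math.log2(size));
	return Math.min(2 ** power, 512 * scale);
}

For example, a 640x360 broadcast at scale 1 gives sqrt(230400) ≈ 480, which rounds up to 512 and lands exactly at the cap.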
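
The name.ts clamping added in patch 18 (and reformatted in patch 19) keeps the DOM label inside the canvas while it tracks its broadcast. Extracted as a sketch, assuming DOMRect-shaped inputs; the clamp helper is illustrative:

const clamp = (value: number, min: number, max: number) => Math.max(min, Math.min(value, max));

function positionLabel(root: HTMLElement, pageBounds: DOMRect, canvasRect: DOMRect) {
	const fontSize = 12;
	const offset = 12;

	// Pin the label to the broadcast's top-left corner, but never let it
	// escape the canvas rect.
	const left = Math.round(clamp(pageBounds.x + offset, canvasRect.left + offset, canvasRect.right - offset));
	const top = Math.round(clamp(pageBounds.y + offset, canvasRect.top + offset, canvasRect.bottom - fontSize - offset));

	root.style.left = `${left}px`;
	root.style.top = `${top}px`;

	// Width is limited by both the broadcast and the remaining canvas space.
	const maxWidthFromBroadcast = Math.max(0, pageBounds.width - 2 * offset);
	const maxWidthFromCanvas = Math.max(0, canvasRect.right - left - offset);
	root.style.maxWidth = `${Math.min(maxWidthFromBroadcast, maxWidthFromCanvas)}px`;
}

The same updatePosition callback is then reused for resize, scroll, and signal-driven updates, which is why the scroll listener is registered with { passive: true }.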