diff --git a/app/index.css b/app/index.css
index 2e0e1592..46a0d6ec 100644
--- a/app/index.css
+++ b/app/index.css
@@ -161,3 +161,18 @@ main {
input[type="range"] {
accent-color: hsl(var(--link-hue), 75%, 50%);
}
+
+/* Locator throb animation */
+@keyframes throb {
+ 0%,
+ 100% {
+ transform: scale(1);
+ }
+ 50% {
+ transform: scale(1.1);
+ }
+}
+
+.animate-throb {
+ animation: throb 2s ease-in-out infinite;
+}
diff --git a/app/package.json b/app/package.json
index 5f15accd..12a1605f 100644
--- a/app/package.json
+++ b/app/package.json
@@ -7,7 +7,8 @@
"scripts": {
"dev": "bunx --bun vite --open",
"build": "bunx --bun vite build",
- "check": "tsc --noEmit && biome check",
+ "check": "tsc --noEmit && biome check && bun run check:shaders",
+ "check:shaders": "glslangValidator src/room/gl/shaders/*.vert src/room/gl/shaders/*.frag",
"fix": "biome check --fix",
"tauri": "tauri"
},
diff --git a/app/src/about.tsx b/app/src/about.tsx
index 0d16b459..cb1afde3 100644
--- a/app/src/about.tsx
+++ b/app/src/about.tsx
@@ -1,5 +1,8 @@
+import solid from "@kixelated/signals/solid";
import { createEffect, createSignal, type JSX, onCleanup } from "solid-js";
+import AudioPrompt from "./components/audio-prompt";
import CreateHang from "./components/create";
+import DemoHeader from "./components/demo-header";
import Layout from "./layout/web";
import { Canvas } from "./room/canvas";
import { FakeRoom } from "./room/fake";
@@ -7,9 +10,15 @@ import { FakeRoom } from "./room/fake";
export function About(): JSX.Element {
const canvas = ;
- const room = new FakeRoom(new Canvas(canvas as HTMLCanvasElement, { demo: true }));
+ const room = new FakeRoom(new Canvas(canvas as HTMLCanvasElement));
onCleanup(() => room.close());
+ const audioSuspended = solid(room.sound.suspended);
+
+ const handleEnableAudio = () => {
+ room.sound.enabled.set(true);
+ };
+
const services = ["Meet", "Zoom", "Teams", "Discord", "Skype", "WebEx", "FaceTime", "WhatsApp"];
const [currentService, setCurrentService] = createSignal(0);
@@ -109,11 +118,11 @@ export function About(): JSX.Element {
() => {},
() => {
two.user.name.set("omni-chan");
- two.user.avatar.set("/avatar/omni.jpg");
+ two.show(new URL("/avatar/omni.jpg", import.meta.url));
},
() => two.chat.typing.active.set(true),
() => two.chat.message.latest.set("oops wrong button"),
- () => two.user.avatar.set("/avatar/43.svg"),
+ () => two.stop(),
() => three.chat.typing.active.set(true),
() => three.chat.message.latest.set("dude"),
() => two.play(new URL("/meme/linus.mp4", import.meta.url)),
@@ -251,17 +260,17 @@ export function About(): JSX.Element {
-
{canvas}
+
Powered by new and open source web tech:{" "}
MoQ . There's more to live than another {services[currentService()]}{" "}
clone. Crazy , I know.
-
-
-
-
);
diff --git a/app/src/account.tsx b/app/src/account.tsx
index 47ad23a9..bda89205 100644
--- a/app/src/account.tsx
+++ b/app/src/account.tsx
@@ -114,8 +114,8 @@ function AccountLoad(): JSX.Element {
Delete Account?
- This action cannot be undone. Your account, profile information, and all associated data will be
- permanently deleted.
+ This action cannot be undone. Your account, profile information, and all associated data
+ will be permanently deleted.
void }): JSX.Element {
+ return (
+
+
+
+
+
+ Click to enable audio
+
+
+
+
+ );
+}
diff --git a/app/src/components/badge.ts b/app/src/components/badge.ts
index 10a28478..89b04f89 100644
--- a/app/src/components/badge.ts
+++ b/app/src/components/badge.ts
@@ -4,14 +4,20 @@ import * as Tauri from "../tauri";
async function set(count: number | undefined) {
if (Tauri.Api) {
- await Tauri.Api.window
+ const success = await Tauri.Api.window
.getCurrentWindow()
.setBadgeCount(count || undefined)
- .catch((error) => console.warn("Failed to set Tauri badge:", error));
- } else if (navigator.setAppBadge) {
- await navigator
+ .then(() => true)
+ .catch(() => false);
+ if (success) return;
+ }
+
+ if (navigator.setAppBadge) {
+ const success = await navigator
.setAppBadge(count || undefined)
- .catch((error) => console.warn("Failed to set Web badge:", error));
+ .then(() => true)
+ .catch(() => false);
+ if (success) return;
}
}
diff --git a/app/src/components/demo-header.tsx b/app/src/components/demo-header.tsx
new file mode 100644
index 00000000..0d620ea6
--- /dev/null
+++ b/app/src/components/demo-header.tsx
@@ -0,0 +1,9 @@
+import type { JSX } from "solid-js/jsx-runtime";
+
+export default function DemoHeader(): JSX.Element {
+ return (
+
+ );
+}
diff --git a/app/src/components/profile.tsx b/app/src/components/profile.tsx
index bbdae37b..dd0c2dea 100644
--- a/app/src/components/profile.tsx
+++ b/app/src/components/profile.tsx
@@ -124,7 +124,7 @@ class LocalPreview {
constructor(element: HTMLCanvasElement, camera: Publish.Broadcast) {
// Create a minimal canvas without the background effects
- this.canvas = new Canvas(element, { demo: false });
+ this.canvas = new Canvas(element);
// Create a minimal sound context (muted for preview)
this.sound = new Sound();
diff --git a/app/src/index.tsx b/app/src/index.tsx
index cfc6e126..154b2c7d 100644
--- a/app/src/index.tsx
+++ b/app/src/index.tsx
@@ -25,7 +25,7 @@ import { Canvas } from "./room/canvas";
import { Sup } from "./sup";
export function Hang(): JSX.Element {
- const background = ( ) as HTMLCanvasElement;
+ const background = ( ) as HTMLCanvasElement;
const canvas = new Canvas(background);
onCleanup(() => canvas.close());
diff --git a/app/src/privacy.tsx b/app/src/privacy.tsx
index 5ed9937a..965d8c25 100644
--- a/app/src/privacy.tsx
+++ b/app/src/privacy.tsx
@@ -8,32 +8,41 @@ export default function Privacy() {
Last Updated: October 2, 2025
- Philosophy
+
+ Philosophy
+
- We believe in the right to privacy.
- Have fun and be weird; you're not being judged (by us at least).
+ We believe in the right to privacy. Have fun and be weird; you're not being judged (by us at
+ least).
- What We Collect
+
+ What We Collect
+
We collect minimal information to provide our service:
- Account Information : When you sign in with a linked provider (ex. Google, Discord, or Apple), we store
- your email address, display name, and avatar. You can replace your name and avatar at any time.
+ Account Information : When you sign in with a linked provider (ex. Google,
+ Discord, or Apple), we store your email address, display name, and avatar. You can replace
+ your name and avatar at any time.
- Session State : An authentication token and any user preferences are stored in your browser's local storage. It is cleared when you log out.
+ Session State : An authentication token and any user preferences are stored
+ in your browser's local storage. It is cleared when you log out.
- Media Cache : We cache seconds worth of media to improve the playback experience. It is cleared immediately after disconnecting.
+ Media Cache : We cache seconds worth of media to improve the playback
+ experience. It is cleared immediately after disconnecting.
- What We Don't Collect
+
+ What We Don't Collect
+
We do not:
Store any video/audio/conversations
@@ -44,7 +53,9 @@ export default function Privacy() {
- How We Use Your Information
+
+ How We Use Your Information
+
Your account information is used to:
Identify you when you're logged in
@@ -53,7 +64,9 @@ export default function Privacy() {
- Data Storage
+
+ Data Storage
+
Account information is stored securely on our servers
Authentication tokens are stored locally in your browser
@@ -62,7 +75,9 @@ export default function Privacy() {
- Third-Party Authentication
+
+ Third-Party Authentication
+
We use Google, Discord, and Apple for sign-in. When you authenticate, you're subject to their
privacy policies. We only receive basic profile information with your consent.
@@ -70,7 +85,9 @@ export default function Privacy() {
- Data Sharing
+
+ Data Sharing
+
We do not sell, rent, or share your information
We do not display ads or work with advertisers
@@ -79,7 +96,9 @@ export default function Privacy() {
- Hang Privacy
+
+ Hang Privacy
+
All hangs are public to anyone with the URL
Do not share sensitive information in hangs
@@ -89,24 +108,40 @@ export default function Privacy() {
- Your Rights
+
+ Your Rights
+
- Changes to This Policy
+
+ Changes to This Policy
+
We'll notify users via email of significant policy changes.
- )
+ );
}
diff --git a/app/src/room/audio.ts b/app/src/room/audio.ts
index 2daeab84..1452f378 100644
--- a/app/src/room/audio.ts
+++ b/app/src/room/audio.ts
@@ -29,8 +29,8 @@ export class Audio {
#volumeSmoothed = 0;
- #speaking = false;
- #speakingAlpha = 0;
+ // Public volume for visualization (0 to 1)
+ volume = 0;
#signals = new Effect();
@@ -44,7 +44,7 @@ export class Audio {
const meme = effect.get(this.broadcast.meme);
if (!meme) return;
- const source = new MediaElementAudioSourceNode(this.sound.context, { mediaElement: meme });
+ const source = new MediaElementAudioSourceNode(this.sound.context, { mediaElement: meme.element });
// Use the existing notifications context so we don't need to create our own panner/volume.
this.sound.connect(source);
@@ -98,12 +98,6 @@ export class Audio {
if (!(this.broadcast.source instanceof Publish.Broadcast)) {
this.#signals.effect(this.#runOutput.bind(this));
}
-
- // Track speaking state from publish broadcast
- this.#signals.effect((effect) => {
- const speaking = effect.get(this.broadcast.source.audio.speaking.active);
- this.#speaking = speaking ?? false;
- });
}
#runOutput(effect: Effect) {
@@ -134,107 +128,44 @@ export class Audio {
}
}
- renderBackground(ctx: CanvasRenderingContext2D) {
- ctx.save();
-
- const bounds = this.broadcast.bounds.peek();
-
- ctx.translate(bounds.position.x, bounds.position.y);
-
- const RADIUS = 12 * this.broadcast.zoom.peek();
- const PADDING = 12 * this.broadcast.zoom.peek();
-
- // Background outline
- ctx.beginPath();
- this.#roundedRectPath(
- ctx,
- -PADDING,
- -PADDING,
- bounds.size.x + PADDING * 2,
- bounds.size.y + PADDING * 2,
- RADIUS,
- );
- ctx.fillStyle = "#000";
- ctx.fill();
-
- ctx.restore();
- }
-
- render(ctx: CanvasRenderingContext2D) {
- // Compute average volume
- const analyserBuffer = this.sound.analyze();
- if (!analyserBuffer) return; // undefined in potato mode
-
- const bounds = this.broadcast.bounds.peek();
- const scale = this.broadcast.zoom.peek();
-
- ctx.save();
- ctx.translate(bounds.position.x, bounds.position.y);
-
- const PADDING = 12 * scale;
- const RADIUS = 12 * scale;
+ tick() {
+ // Get audio from the notification/meme context
+ const soundBuffer = this.sound.analyze();
+ if (!soundBuffer) {
+ this.volume *= 0.95; // Fade out when no analyser
+ return;
+ }
- // Take the absolute value of the distance from 128, which is silence.
- for (let i = 0; i < this.#analyserBuffer.length; i++) {
- analyserBuffer[i] = Math.abs(analyserBuffer[i] - 128);
+ // Take the absolute value of the distance from 128 (silence)
+ for (let i = 0; i < soundBuffer.length; i++) {
+ soundBuffer[i] = Math.abs(soundBuffer[i] - 128);
}
- // If the audio is playing, combine the buffers.
+ // If the broadcast audio is playing, combine the buffers
if (this.#analyser) {
- if (this.#analyserBuffer.length !== analyserBuffer.length) {
+ if (this.#analyserBuffer.length !== soundBuffer.length) {
throw new Error("analyser buffer length mismatch");
}
this.#analyser.getByteTimeDomainData(this.#analyserBuffer);
for (let i = 0; i < this.#analyserBuffer.length; i++) {
- analyserBuffer[i] += Math.abs(this.#analyserBuffer[i] - 128);
+ soundBuffer[i] += Math.abs(this.#analyserBuffer[i] - 128);
}
}
+ // Calculate RMS volume
let sum = 0;
- for (let i = 0; i < this.#analyserBuffer.length; i++) {
- const sample = analyserBuffer[i];
+ for (let i = 0; i < soundBuffer.length; i++) {
+ const sample = soundBuffer[i];
sum += sample * sample;
}
- const volume = Math.sqrt(sum) / this.#analyserBuffer.length;
- this.#volumeSmoothed = this.#volumeSmoothed * 0.7 + volume * 0.3;
-
- // Colored fill based on volume and speaking state
- const expand = PADDING * Math.min(1, this.#volumeSmoothed - 0.01);
-
- ctx.beginPath();
- this.#roundedRectPath(ctx, -expand, -expand, bounds.size.x + expand * 2, bounds.size.y + expand * 2, RADIUS);
-
- const hue = 180 + this.#volumeSmoothed * 120;
- const alpha = 0.3 + this.#volumeSmoothed * 0.4;
-
- ctx.fillStyle = `hsla(${hue}, 80%, 45%, ${alpha})`;
- ctx.fill();
-
- // Ramp up/down the speaking alpha based on the speaking state.
- this.#speakingAlpha = Math.max(Math.min(1, this.#speakingAlpha + (this.#speaking ? 0.1 : -0.1)), 0);
-
- // Add an additional border if we're speaking, ramping up/down the alpha
- if (this.#speakingAlpha > 0) {
- ctx.strokeStyle = `hsla(${hue}, 80%, 45%, ${this.#speakingAlpha})`;
- ctx.lineWidth = 6 * scale;
- ctx.stroke();
- }
+ const volume = Math.sqrt(sum) / soundBuffer.length;
- ctx.restore();
- }
+ // Smooth the volume with exponential moving average
+ this.#volumeSmoothed = this.#volumeSmoothed * 0.7 + volume * 0.3;
- #roundedRectPath(ctx: CanvasRenderingContext2D, x: number, y: number, w: number, h: number, r: number) {
- const maxR = Math.min(r, w / 2, h / 2);
- ctx.moveTo(x + maxR, y);
- ctx.lineTo(x + w - maxR, y);
- ctx.quadraticCurveTo(x + w, y, x + w, y + maxR);
- ctx.lineTo(x + w, y + h - maxR);
- ctx.quadraticCurveTo(x + w, y + h, x + w - maxR, y + h);
- ctx.lineTo(x + maxR, y + h);
- ctx.quadraticCurveTo(x, y + h, x, y + h - maxR);
- ctx.lineTo(x, y + maxR);
- ctx.quadraticCurveTo(x, y, x + maxR, y);
+ // Store the smoothed volume (already in the right range from the buffer values)
+ this.volume = this.#volumeSmoothed;
}
close() {
diff --git a/app/src/room/broadcast.ts b/app/src/room/broadcast.ts
index 32dcffbc..d8476aea 100644
--- a/app/src/room/broadcast.ts
+++ b/app/src/room/broadcast.ts
@@ -6,6 +6,8 @@ import { Captions } from "./captions";
import { Chat } from "./chat";
import { FakeBroadcast } from "./fake";
import { Bounds, Vector } from "./geometry";
+import { Meme } from "./meme";
+import { Name } from "./name";
import { Sound } from "./sound";
import { Video } from "./video";
@@ -42,6 +44,7 @@ export class Broadcast
{
video: Video;
chat: Chat;
captions: Captions;
+ name: Name;
// The current chat message, if any.
message = new Signal(undefined);
@@ -61,14 +64,16 @@ export class Broadcast {
position: Signal;
// The meme video/audio we're rendering, if any.
- meme = new Signal(undefined);
- memeName = new Signal(undefined);
+ meme = new Signal(undefined);
scale: Signal; // room scale, 1 is 100%
zoom = new Signal(1.0); // local zoom, 1 is 100%
- // Show a locator arrow for 8 seconds to show our position on join.
- #locatorStart?: DOMHighResTimeStamp;
+ online = new Signal(true); // false is offline, true is online
+ #onlineTransition: DOMHighResTimeStamp = 0;
+
+ // Computed opacity based on online fade-in/fade-out (0-1)
+ opacity: number = 1;
signals = new Effect();
@@ -93,6 +98,7 @@ export class Broadcast {
this.audio = new Audio(this, props.sound);
this.chat = new Chat(this, props.canvas);
this.captions = new Captions(this, props.canvas);
+ this.name = new Name(this, props.canvas);
const viewport = this.canvas.viewport.peek();
@@ -152,11 +158,9 @@ export class Broadcast {
const meme = this.audio.sound.meme(memeName);
if (meme) {
this.meme.update((prev) => {
- prev?.pause();
+ prev?.element.pause();
return meme;
});
- this.memeName.set(memeName);
-
return;
}
}
@@ -192,9 +196,14 @@ export class Broadcast {
}
}
- // TODO Also make scale a signal
- tick() {
- this.video.tick();
+ tick(now: DOMHighResTimeStamp) {
+ this.audio.tick();
+ this.video.tick(now);
+
+ // Update opacity based on online status
+ const fadeTime = 300; // ms
+ const elapsed = now - this.#onlineTransition;
+ this.opacity = this.online.peek() ? Math.min(1, elapsed / fadeTime) : Math.max(0, 1 - elapsed / fadeTime);
const bounds = this.bounds.peek();
const viewport = this.canvas.viewport.peek();
@@ -281,80 +290,19 @@ export class Broadcast {
return false;
}
- // Render a locator arrow for our local broadcasts on join
- renderLocator(now: DOMHighResTimeStamp, ctx: CanvasRenderingContext2D) {
- if (!this.source.enabled.peek()) return;
-
- if (!this.visible.peek()) {
- this.#locatorStart = undefined;
- return;
- }
-
- if (!this.#locatorStart) {
- this.#locatorStart = now;
- }
-
- const elapsed = now - this.#locatorStart;
- const alpha = Math.min(Math.max((7000 - elapsed) / (10000 - 8000), 0), 1);
- if (alpha <= 0) {
- return;
- }
-
- const bounds = this.bounds.peek();
-
- ctx.save();
- ctx.globalAlpha *= alpha;
-
- // Calculate arrow position and animation
- const arrowSize = 12 * this.zoom.peek();
- const pulseScale = 1 + Math.sin(now / 500) * 0.1; // Subtle pulsing effect
- const offset = 10 * this.zoom.peek();
-
- const gap = 2 * (arrowSize + offset);
-
- const x = Math.min(Math.max(bounds.position.x + bounds.size.x / 2, 0), ctx.canvas.width);
- const y = Math.min(Math.max(bounds.position.y, 2 * gap), ctx.canvas.height);
-
- ctx.translate(x, y - gap);
- ctx.scale(pulseScale, pulseScale);
-
- ctx.beginPath();
- ctx.moveTo(0, arrowSize);
- ctx.lineTo(-arrowSize / 2, 0);
- ctx.lineTo(arrowSize / 2, 0);
- ctx.closePath();
-
- // Style the arrow
- ctx.lineWidth = 4 * this.zoom.peek();
- ctx.strokeStyle = "#000"; // Gold color
- ctx.fillStyle = "#FFD700";
- ctx.stroke();
- ctx.fill();
-
- // Draw "YOU" text
- const fontSize = Math.round(32 * this.zoom.peek()); // round to avoid busting font caches
- ctx.font = `bold ${fontSize}px Arial`;
- ctx.textAlign = "center";
- ctx.textBaseline = "middle";
- ctx.fillStyle = "#FFD700";
- ctx.strokeText("YOU", 0, -arrowSize - offset);
- ctx.fillText("YOU", 0, -arrowSize - offset);
-
- /*
- // Add a subtle glow effect
- ctx.shadowColor = "#FFD700";
- ctx.shadowBlur = 10 * fontScale;
- ctx.stroke();
- */
-
- ctx.restore();
+ // Called when online status changes to trigger fade transition
+ setOnline(online: boolean) {
+ this.online.set(online);
+ this.#onlineTransition = performance.now();
}
close() {
this.signals.close();
this.audio.close();
+ this.video.close();
this.chat.close();
this.captions.close();
+ this.name.close();
// NOTE: Don't close the source broadcast; we need it for the local preview.
// this.source.close();
diff --git a/app/src/room/canvas.ts b/app/src/room/canvas.ts
index 9a0c5fa3..2454f651 100644
--- a/app/src/room/canvas.ts
+++ b/app/src/room/canvas.ts
@@ -1,31 +1,21 @@
import { Effect, Signal } from "@kixelated/signals";
+import Settings from "../settings";
import { Vector } from "./geometry";
-
-const LINE_SPACING = 32;
-const LINE_WIDTH = 5;
-const SEGMENTS = 16;
-const WOBBLE_AMPLITUDE = 5;
-const BEND_AMPLITUDE = 8;
-const BEND_PROBABILITY = 0.2;
-const WOBBLE_SPEED = 0.0006;
-const LINE_OVERDRAW = 4;
-const COLOR_SPEED = 0.01;
-
-export type CanvasProps = {
- demo?: boolean;
-};
+import { BackgroundRenderer } from "./gl/background";
+import { Camera } from "./gl/camera";
+import { GLContext } from "./gl/context";
export class Canvas {
#canvas: HTMLCanvasElement;
- #context: CanvasRenderingContext2D;
+ #glContext: GLContext;
+ #camera: Camera;
+ #backgroundRenderer: BackgroundRenderer;
// Use a callback to render after the background.
- onRender?: (ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) => void;
- #animate?: number;
+ onRender?: (now: DOMHighResTimeStamp) => void;
visible: Signal;
viewport: Signal;
- demo: Signal;
#signals = new Effect();
@@ -33,77 +23,65 @@ export class Canvas {
return this.#canvas;
}
- constructor(element: HTMLCanvasElement, props?: CanvasProps) {
- this.#canvas = element;
+ get gl(): WebGL2RenderingContext {
+ return this.#glContext.gl;
+ }
+
+ get glContext(): GLContext {
+ return this.#glContext;
+ }
- this.demo = new Signal(props?.demo ?? false);
+ get camera() {
+ return this.#camera;
+ }
- const context = this.#canvas.getContext("2d");
- if (!context) {
- throw new Error("Failed to get canvas context");
- }
+ constructor(element: HTMLCanvasElement) {
+ this.#canvas = element;
- this.#context = context;
this.visible = new Signal(false);
this.viewport = new Signal(Vector.create(0, 0));
- const resize = () => {
- // Check if we're in fullscreen or fixed position
- const isFullscreen = document.fullscreenElement === this.#canvas;
- const style = window.getComputedStyle(this.#canvas);
- const isFixed = style.position === "fixed";
-
- let newWidth: number;
- let newHeight: number;
-
- if (isFullscreen || isFixed) {
- // Use window dimensions for fullscreen or fixed position
- newWidth = window.innerWidth;
- newHeight = window.innerHeight;
- } else {
- // Use parent container dimensions
- const parent = this.#canvas.parentElement;
- if (!parent) return;
-
- const rect = parent.getBoundingClientRect();
- newWidth = rect.width;
- newHeight = rect.height;
- }
+ // Initialize WebGL2 context
+ this.#glContext = new GLContext(this.#canvas, this.viewport);
+ this.#camera = new Camera();
+ this.#backgroundRenderer = new BackgroundRenderer(this.#glContext);
- newWidth *= window.devicePixelRatio;
- newHeight *= window.devicePixelRatio;
+ const resize = (entries: ResizeObserverEntry[]) => {
+ for (const entry of entries) {
+ // Get device pixel dimensions using the user's configured ratio
+ const dpr = Settings.render.scale.peek();
- // Only update canvas if dimensions actually changed
- // This prevents the canvas from being cleared when layout changes don't affect size
- if (this.#canvas.width === newWidth && this.#canvas.height === newHeight) {
- return;
- }
+ // Always use contentBoxSize and multiply by our custom ratio
+ // to ensure we respect the user's setting
+ const width = entry.contentBoxSize[0].inlineSize * dpr;
+ const height = entry.contentBoxSize[0].blockSize * dpr;
- this.#canvas.width = newWidth;
- this.#canvas.height = newHeight;
+ const newWidth = Math.max(1, Math.floor(width));
+ const newHeight = Math.max(1, Math.floor(height));
- // The internal logic ignores devicePixelRatio because we automatically scale when rendering.
- this.viewport.set(
- Vector.create(
- this.#canvas.width / window.devicePixelRatio,
- this.#canvas.height / window.devicePixelRatio,
- ),
- );
- };
+ // Only update canvas if dimensions actually changed
+ if (this.#canvas.width === newWidth && this.#canvas.height === newHeight) {
+ return;
+ }
- let resizeTimeout: ReturnType | undefined;
+ this.#canvas.width = newWidth;
+ this.#canvas.height = newHeight;
- const scheduleResize = () => {
- // Clear any existing timeout
- if (resizeTimeout) {
- clearTimeout(resizeTimeout);
- }
+ // Update WebGL viewport
+ this.#glContext.resize(newWidth, newHeight);
+
+ // The internal logic ignores devicePixelRatio because we automatically scale when rendering.
+ const viewport = Vector.create(newWidth / dpr, newHeight / dpr);
+ this.viewport.set(viewport);
- // Debounce resize to prevent flickering during rapid changes
- resizeTimeout = setTimeout(() => {
- resize();
- resizeTimeout = undefined;
- }, 50);
+ // Update camera projection
+ this.#camera.updateOrtho(viewport);
+
+ // Render immediately to avoid black flicker during resize
+ if (this.visible.peek()) {
+ this.#render(performance.now());
+ }
+ }
};
const visible = () => {
@@ -112,43 +90,22 @@ export class Canvas {
visible();
- // Set up ResizeObserver for parent when canvas is added to DOM
- let resizeObserver: ResizeObserver | null = null;
-
- const setupParentObserver = () => {
- const parent = this.#canvas.parentElement;
- if (parent && !resizeObserver) {
- resizeObserver = new ResizeObserver(scheduleResize);
- resizeObserver.observe(parent);
- resize();
- }
- };
+ // Set up ResizeObserver for canvas
+ // Use content-box so we can apply our custom devicePixelRatio setting
+ const resizeObserver = new ResizeObserver(resize);
+ resizeObserver.observe(this.#canvas, { box: "content-box" });
- // Try to set up observer immediately if already in DOM
- setupParentObserver();
+ this.#signals.event(document, "visibilitychange", visible);
- // Watch for canvas being added to DOM
- const mutationObserver = new MutationObserver(() => {
- if (this.#canvas.parentElement) {
- setupParentObserver();
- mutationObserver.disconnect();
- }
+ // Trigger resize when devicePixelRatio setting changes
+ this.#signals.subscribe(Settings.render.scale, () => {
+ // Force a resize by temporarily disconnecting and reconnecting
+ resizeObserver.disconnect();
+ resizeObserver.observe(this.#canvas, { box: "content-box" });
});
- if (!this.#canvas.parentElement) {
- mutationObserver.observe(document.body, { childList: true, subtree: true });
- }
-
- this.#signals.event(document, "visibilitychange", visible);
-
this.#signals.cleanup(() => {
- if (resizeObserver) {
- resizeObserver.disconnect();
- }
- mutationObserver.disconnect();
- if (resizeTimeout) {
- clearTimeout(resizeTimeout);
- }
+ resizeObserver.disconnect();
});
// Only render the canvas when it's visible.
@@ -156,115 +113,33 @@ export class Canvas {
const visible = effect.get(this.visible);
if (!visible) return;
- this.#animate = requestAnimationFrame(this.#render.bind(this));
- effect.cleanup(() => cancelAnimationFrame(this.#animate ?? 0));
+ let cancel: number;
+ const render = (now: DOMHighResTimeStamp) => {
+ try {
+ this.#render(now);
+ } catch (err) {
+ console.error("render error", err);
+ }
+ cancel = requestAnimationFrame(render);
+ };
+
+ cancel = requestAnimationFrame(render);
+
+ effect.cleanup(() => cancelAnimationFrame(cancel));
});
}
#render(now: DOMHighResTimeStamp) {
- const ctx = this.#context;
- ctx.imageSmoothingEnabled = true;
- ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
+ // Clear the screen
+ this.#glContext.clear();
- // Apply devicePixelRatio scaling once at the start
- // This allows all drawing operations to use logical pixels (CSS pixels)
- ctx.save();
- ctx.scale(window.devicePixelRatio, window.devicePixelRatio);
-
- this.#renderBackground(this.#context, now);
-
- if (this.demo.peek()) {
- this.#renderDemo(this.#context);
- }
+ // Render background with shader
+ this.#backgroundRenderer.render(now);
+ // Render broadcasts
if (this.onRender) {
- try {
- this.onRender(this.#context, now);
- } catch (err) {
- console.error("render error", err);
- }
- }
-
- // Restore the context to remove the scaling
- ctx.restore();
-
- this.#animate = requestAnimationFrame(this.#render.bind(this));
- }
-
- #renderDemo(ctx: CanvasRenderingContext2D) {
- ctx.save();
-
- // Use logical dimensions (CSS pixels)
- const width = ctx.canvas.width / window.devicePixelRatio;
- const height = ctx.canvas.height / window.devicePixelRatio;
-
- const fontSize = Math.round(64); // round to avoid busting font caches
- ctx.font = `bold ${fontSize}px sans-serif`;
- ctx.fillStyle = "rgba(255, 255, 255, 0.15)";
- ctx.textAlign = "center";
- ctx.textBaseline = "middle";
-
- const positions = [
- { x: width * 0.3, y: height * 0.3, angle: -25 },
- { x: width * 0.7, y: height * 0.5, angle: 30 },
- { x: width * 0.5, y: height * 0.7, angle: -15 },
- { x: width * 0.2, y: height * 0.6, angle: 20 },
- { x: width * 0.8, y: height * 0.25, angle: -35 },
- ];
-
- for (const pos of positions) {
- ctx.save();
- ctx.translate(pos.x, pos.y);
- ctx.rotate((pos.angle * Math.PI) / 180);
- ctx.fillText("DEMO", 0, 0);
- ctx.restore();
- }
-
- ctx.restore();
- }
-
- #renderBackground(ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) {
- ctx.save();
-
- // Use logical dimensions (CSS pixels)
- const width = ctx.canvas.width / window.devicePixelRatio;
- const height = ctx.canvas.height / window.devicePixelRatio;
-
- const LINE_COUNT = Math.ceil(height / LINE_SPACING) + LINE_OVERDRAW * 2;
-
- ctx.lineWidth = LINE_WIDTH;
- ctx.lineCap = "round";
- ctx.globalAlpha = 0.25;
-
- for (let i = 0; i < LINE_COUNT; i++) {
- ctx.strokeStyle = lineColor(now, i);
-
- const baseY = (i - LINE_OVERDRAW) * LINE_SPACING;
- const wobble = Math.sin(now * WOBBLE_SPEED + i) * WOBBLE_AMPLITUDE;
-
- ctx.beginPath();
-
- for (let s = 0; s <= SEGMENTS; s++) {
- const t = s / SEGMENTS;
- const xBase = -100 + t * (width + 200);
- const xWobble = Math.sin(now * WOBBLE_SPEED + s + i) * WOBBLE_AMPLITUDE;
- const x = xBase + xWobble;
-
- const seed = (s * 31 + i * 17) % 100;
- const bend = seed / 100 < BEND_PROBABILITY ? (seed % 2 === 0 ? 1 : -1) * BEND_AMPLITUDE : 0;
-
- const y = baseY + wobble + bend + t * 200;
- if (s === 0) {
- ctx.moveTo(x, y);
- } else {
- ctx.lineTo(x, y);
- }
- }
-
- ctx.stroke();
+ this.onRender(now);
}
-
- ctx.restore();
}
toggleFullscreen() {
@@ -294,63 +169,6 @@ export class Canvas {
close() {
this.#signals.close();
+ this.#backgroundRenderer.cleanup();
}
}
-
-function lineColor(now: DOMHighResTimeStamp, i: number) {
- const hue = (i * 25 + now * COLOR_SPEED) % 360;
- return `hsl(${hue}, 75%, 50%)`;
-}
-
-// A node function to output the above as a
-export function generateSvg() {
- const now = 0;
- const WIDTH = 1920;
- const HEIGHT = 1080;
-
- const LINE_COUNT = Math.ceil(HEIGHT / LINE_SPACING) + LINE_OVERDRAW * 2;
-
- const paths = [];
- for (let i = 0; i < LINE_COUNT; i++) {
- const color = lineColor(now, i);
- const baseY = (i - LINE_OVERDRAW) * LINE_SPACING;
- const wobble = Math.sin(now * WOBBLE_SPEED + i) * WOBBLE_AMPLITUDE;
-
- const commands = [];
-
- for (let s = 0; s <= SEGMENTS; s++) {
- const t = s / SEGMENTS;
- const xBase = -100 + t * (WIDTH + 200);
- const xWobble = Math.sin(now * WOBBLE_SPEED + s + i) * WOBBLE_AMPLITUDE;
- const x = xBase + xWobble;
-
- const seed = (s * 31 + i * 17) % 100;
- const bend = seed / 100 < BEND_PROBABILITY ? (seed % 2 === 0 ? 1 : -1) * BEND_AMPLITUDE : 0;
-
- const y = baseY + wobble + bend + t * 200;
- const cmd = `${s === 0 ? "M" : "L"} ${x.toFixed(1)}, ${y.toFixed(1)}`;
- commands.push(cmd);
- }
-
- const d = commands.join(" ");
-
- paths.push(` `);
- }
-
- return `
-
-
-
- ${paths.join("\n")}
-
- `;
-}
-
-/* UNCOMMENT TO GENERATE SVG
-import fs from "node:fs";
-
-if (import.meta.url === `file://${process.argv[1]}`) {
- fs.writeFileSync("public/image/background.svg", generateSvg());
- console.log("SVG written to public/image/background.svg");
-}
-*/
diff --git a/app/src/room/chat.ts b/app/src/room/chat.ts
index 53f19c41..f7416274 100644
--- a/app/src/room/chat.ts
+++ b/app/src/room/chat.ts
@@ -124,6 +124,18 @@ export class Chat {
updatePosition(bounds, viewport);
});
+ // Update position when window scrolls
+ effect.event(
+ window,
+ "scroll",
+ () => {
+ const bounds = this.broadcast.bounds.peek();
+ const viewport = this.broadcast.canvas.viewport.peek();
+ updatePosition(bounds, viewport);
+ },
+ { passive: true },
+ );
+
effect.effect((effect) => {
const typing = effect.get(this.broadcast.source.chat.typing.active);
DOM.setClass(effect, icon, typing ? "icon-[mdi--chat-typing]" : "icon-[mdi--chat]");
diff --git a/app/src/room/fake.ts b/app/src/room/fake.ts
index 582585f4..c18e425e 100644
--- a/app/src/room/fake.ts
+++ b/app/src/room/fake.ts
@@ -53,7 +53,7 @@ export class FakeBroadcast {
};
video = {
- frame: new Signal(undefined),
+ frame: new Signal(undefined),
catalog: new Signal(undefined),
detection: {
enabled: new Signal(false),
@@ -63,8 +63,6 @@ export class FakeBroadcast {
signals = new Effect();
- #video: HTMLVideoElement | undefined;
-
constructor(sound: Sound, props?: FakeBroadcastProps) {
this.sound = sound;
@@ -97,6 +95,7 @@ export class FakeBroadcast {
});
}
+ // Plays a video file.
play(src: URL) {
const video = document.createElement("video");
video.src = src.toString();
@@ -114,8 +113,15 @@ export class FakeBroadcast {
video.load();
video.play();
- this.#video = video;
- this.video.frame.set(video);
+ const onFrame = () => {
+ this.video.frame.update((prev) => {
+ prev?.close();
+ return new VideoFrame(video);
+ });
+ video.requestVideoFrameCallback(onFrame);
+ };
+
+ video.requestVideoFrameCallback(onFrame);
video.onloadedmetadata = () => {
this.video.catalog.set([
@@ -131,18 +137,47 @@ export class FakeBroadcast {
]);
};
+ video.onended = () => this.stop();
+
const source = new MediaElementAudioSourceNode(this.sound.context, { mediaElement: video });
this.audio.root.set(source);
}
+ // "plays" an image file.
+ show(src: URL) {
+ const image = new Image();
+ image.src = src.toString();
+ image.onload = () => {
+ this.video.frame.update((prev) => {
+ prev?.close();
+ return new VideoFrame(image, { timestamp: 0 });
+ });
+
+ this.video.catalog.set([
+ {
+ track: "image",
+ config: {
+ codec: "fake",
+ displayAspectWidth: u53(image.width),
+ displayAspectHeight: u53(image.height),
+ },
+ },
+ ]);
+ };
+ }
+
stop() {
- this.#video?.pause();
- this.#video = undefined;
+ this.video.frame.update((prev) => {
+ prev?.close();
+ return undefined;
+ });
this.audio.root.update((prev) => {
prev?.disconnect();
return undefined;
});
+
+ this.video.catalog.set(undefined);
}
close() {
diff --git a/app/src/room/gl/background.ts b/app/src/room/gl/background.ts
new file mode 100644
index 00000000..f4c6ada1
--- /dev/null
+++ b/app/src/room/gl/background.ts
@@ -0,0 +1,74 @@
+import type { GLContext } from "./context";
+import { Attribute, Shader, Uniform1f, Uniform2f } from "./shader";
+import backgroundFragSource from "./shaders/background.frag?raw";
+import backgroundVertSource from "./shaders/background.vert?raw";
+
+export class BackgroundRenderer {
+ #glContext: GLContext;
+ #program: Shader;
+ #vao: WebGLVertexArrayObject;
+ #positionBuffer: WebGLBuffer;
+
+ // Typed uniforms and attributes
+ #u_resolution: Uniform2f;
+ #u_time: Uniform1f;
+ #a_position: Attribute;
+
+ constructor(glContext: GLContext) {
+ this.#glContext = glContext;
+ const gl = glContext.gl;
+
+ this.#program = new Shader(gl, backgroundVertSource, backgroundFragSource);
+
+ // Initialize typed uniforms and attributes
+ this.#u_resolution = this.#program.createUniform2f("u_resolution");
+ this.#u_time = this.#program.createUniform1f("u_time");
+ this.#a_position = this.#program.createAttribute("a_position");
+
+ const vao = gl.createVertexArray();
+ if (!vao) throw new Error("Failed to create VAO");
+ this.#vao = vao;
+
+ const positionBuffer = gl.createBuffer();
+ if (!positionBuffer) throw new Error("Failed to create position buffer");
+ this.#positionBuffer = positionBuffer;
+
+ this.#setupQuad();
+ }
+
+ #setupQuad() {
+ const gl = this.#glContext.gl;
+
+ // Fullscreen quad vertices (clip space)
+ const positions = new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]);
+
+ gl.bindVertexArray(this.#vao);
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
+
+ gl.enableVertexAttribArray(this.#a_position.location);
+ gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0);
+
+ gl.bindVertexArray(null);
+ }
+
+ render(now: DOMHighResTimeStamp) {
+ const gl = this.#glContext.gl;
+ const viewport = this.#glContext.viewport.peek();
+
+ this.#program.use();
+ this.#u_resolution.set(viewport.x, viewport.y);
+ this.#u_time.set(now);
+
+ gl.bindVertexArray(this.#vao);
+ gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+ gl.bindVertexArray(null);
+ }
+
+ cleanup() {
+ const gl = this.#glContext.gl;
+ gl.deleteVertexArray(this.#vao);
+ gl.deleteBuffer(this.#positionBuffer);
+ this.#program.cleanup();
+ }
+}
diff --git a/app/src/room/gl/border.ts b/app/src/room/gl/border.ts
new file mode 100644
index 00000000..c54f4116
--- /dev/null
+++ b/app/src/room/gl/border.ts
@@ -0,0 +1,137 @@
+import type { Broadcast } from "../broadcast";
+import { Canvas } from "../canvas";
+import type { Camera } from "./camera";
+import { Attribute, Shader, Uniform1f, Uniform2f, Uniform4f, UniformMatrix4fv } from "./shader";
+import borderFragSource from "./shaders/border.frag?raw";
+import borderVertSource from "./shaders/border.vert?raw";
+
+export class BorderRenderer {
+ #canvas: Canvas;
+ #program: Shader;
+ #vao: WebGLVertexArrayObject;
+ #positionBuffer: WebGLBuffer;
+ #indexBuffer: WebGLBuffer;
+
+ // Typed uniforms
+ #u_projection: UniformMatrix4fv;
+ #u_bounds: Uniform4f;
+ #u_depth: Uniform1f;
+ #u_radius: Uniform1f;
+ #u_size: Uniform2f;
+ #u_opacity: Uniform1f;
+
+ // Typed attributes
+ #a_position: Attribute;
+
+ constructor(canvas: Canvas) {
+ this.#canvas = canvas;
+ this.#program = new Shader(canvas.gl, borderVertSource, borderFragSource);
+
+ // Initialize typed uniforms
+ this.#u_projection = this.#program.createUniformMatrix4fv("u_projection");
+ this.#u_bounds = this.#program.createUniform4f("u_bounds");
+ this.#u_depth = this.#program.createUniform1f("u_depth");
+ this.#u_radius = this.#program.createUniform1f("u_radius");
+ this.#u_size = this.#program.createUniform2f("u_size");
+ this.#u_opacity = this.#program.createUniform1f("u_opacity");
+
+ // Initialize typed attributes
+ this.#a_position = this.#program.createAttribute("a_position");
+
+ const vao = this.#canvas.gl.createVertexArray();
+ if (!vao) throw new Error("Failed to create VAO");
+ this.#vao = vao;
+
+ const positionBuffer = this.#canvas.gl.createBuffer();
+ if (!positionBuffer) throw new Error("Failed to create position buffer");
+ this.#positionBuffer = positionBuffer;
+
+ const indexBuffer = this.#canvas.gl.createBuffer();
+ if (!indexBuffer) throw new Error("Failed to create index buffer");
+ this.#indexBuffer = indexBuffer;
+
+ this.#setupBuffers();
+ }
+
+ #setupBuffers() {
+ const gl = this.#canvas.gl;
+
+ // Quad vertices (0-1 range, will be scaled by bounds)
+ const positions = new Float32Array([
+ 0,
+ 0, // Top-left
+ 1,
+ 0, // Top-right
+ 1,
+ 1, // Bottom-right
+ 0,
+ 1, // Bottom-left
+ ]);
+
+ // Indices for two triangles
+ const indices = new Uint16Array([0, 1, 2, 0, 2, 3]);
+
+ gl.bindVertexArray(this.#vao);
+
+ // Position attribute
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(this.#a_position.location);
+ gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0);
+
+ // Index buffer
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
+
+ gl.bindVertexArray(null);
+ }
+
+ render(broadcast: Broadcast, camera: Camera, maxZ: number) {
+ const gl = this.#canvas.gl;
+ const bounds = broadcast.bounds.peek();
+ const scale = broadcast.zoom.peek();
+
+ this.#program.use();
+
+ // Set projection matrix
+ this.#u_projection.set(camera.projection);
+
+ // Border size (PADDING from old implementation)
+ const border = 12 * scale;
+
+ // Bounds need to include the border expansion
+ this.#u_bounds.set(
+ bounds.position.x - border,
+ bounds.position.y - border,
+ bounds.size.x + border * 2,
+ bounds.size.y + border * 2,
+ );
+
+ // Set depth - borders should be behind everything
+ const baseDepth = camera.zToDepth(broadcast.position.peek().z, maxZ);
+ const depth = baseDepth - 0.04; // Further behind than audio viz
+ this.#u_depth.set(depth);
+
+ // Set radius for rounded corners
+ this.#u_radius.set(border);
+
+ // Set size for SDF calculation
+ this.#u_size.set(bounds.size.x + border * 2, bounds.size.y + border * 2);
+
+ // Set opacity
+ this.#u_opacity.set(broadcast.opacity);
+
+ // Draw
+ gl.bindVertexArray(this.#vao);
+ gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
+ gl.bindVertexArray(null);
+ }
+
+ close() {
+ const gl = this.#canvas.gl;
+ gl.deleteVertexArray(this.#vao);
+ gl.deleteBuffer(this.#positionBuffer);
+ gl.deleteBuffer(this.#indexBuffer);
+ this.#program.cleanup();
+ }
+}
diff --git a/app/src/room/gl/broadcast.ts b/app/src/room/gl/broadcast.ts
new file mode 100644
index 00000000..0310e632
--- /dev/null
+++ b/app/src/room/gl/broadcast.ts
@@ -0,0 +1,213 @@
+import type { Broadcast } from "../broadcast";
+import { Canvas } from "../canvas";
+import type { Camera } from "./camera";
+import { Attribute, Shader, Uniform1f, Uniform1i, Uniform2f, Uniform4f, UniformMatrix4fv } from "./shader";
+import broadcastFragSource from "./shaders/broadcast.frag?raw";
+import broadcastVertSource from "./shaders/broadcast.vert?raw";
+
+export class BroadcastRenderer {
+ #canvas: Canvas;
+ #program: Shader;
+ #vao: WebGLVertexArrayObject;
+ #positionBuffer: WebGLBuffer;
+ #texCoordBuffer: WebGLBuffer;
+ #indexBuffer: WebGLBuffer;
+
+ // Typed uniforms
+ #u_projection: UniformMatrix4fv;
+ #u_bounds: Uniform4f;
+ #u_depth: Uniform1f;
+ #u_radius: Uniform1f;
+ #u_size: Uniform2f;
+ #u_opacity: Uniform1f;
+ #u_frameOpacity: Uniform1f;
+ #u_frameTexture: Uniform1i;
+ #u_avatarTexture: Uniform1i;
+ #u_avatarActive: Uniform1i;
+ #u_memeTexture: Uniform1i;
+ #u_memeOpacity: Uniform1f;
+ #u_memeBounds: Uniform4f;
+ #u_flip: Uniform1i;
+
+ // Typed attributes
+ #a_position: Attribute;
+ #a_texCoord: Attribute;
+
+ constructor(canvas: Canvas) {
+ this.#canvas = canvas;
+ this.#program = new Shader(canvas.gl, broadcastVertSource, broadcastFragSource);
+
+ // Initialize typed uniforms
+ this.#u_projection = this.#program.createUniformMatrix4fv("u_projection");
+ this.#u_bounds = this.#program.createUniform4f("u_bounds");
+ this.#u_depth = this.#program.createUniform1f("u_depth");
+ this.#u_radius = this.#program.createUniform1f("u_radius");
+ this.#u_size = this.#program.createUniform2f("u_size");
+ this.#u_opacity = this.#program.createUniform1f("u_opacity");
+ this.#u_frameOpacity = this.#program.createUniform1f("u_frameOpacity");
+ this.#u_frameTexture = this.#program.createUniform1i("u_frameTexture");
+ this.#u_avatarTexture = this.#program.createUniform1i("u_avatarTexture");
+ this.#u_avatarActive = this.#program.createUniform1i("u_avatarActive");
+ this.#u_memeTexture = this.#program.createUniform1i("u_memeTexture");
+ this.#u_memeOpacity = this.#program.createUniform1f("u_memeOpacity");
+ this.#u_memeBounds = this.#program.createUniform4f("u_memeBounds");
+ this.#u_flip = this.#program.createUniform1i("u_flip");
+
+ // Initialize typed attributes
+ this.#a_position = this.#program.createAttribute("a_position");
+ this.#a_texCoord = this.#program.createAttribute("a_texCoord");
+
+ const vao = this.#canvas.gl.createVertexArray();
+ if (!vao) throw new Error("Failed to create VAO");
+ this.#vao = vao;
+
+ const positionBuffer = this.#canvas.gl.createBuffer();
+ if (!positionBuffer) throw new Error("Failed to create position buffer");
+ this.#positionBuffer = positionBuffer;
+
+ const texCoordBuffer = this.#canvas.gl.createBuffer();
+ if (!texCoordBuffer) throw new Error("Failed to create texCoord buffer");
+ this.#texCoordBuffer = texCoordBuffer;
+
+ const indexBuffer = this.#canvas.gl.createBuffer();
+ if (!indexBuffer) throw new Error("Failed to create index buffer");
+ this.#indexBuffer = indexBuffer;
+
+ this.#setupBuffers();
+ }
+
+ #setupBuffers() {
+ const gl = this.#canvas.gl;
+
+ // Quad vertices (0-1 range, will be scaled by bounds)
+ const positions = new Float32Array([
+ 0,
+ 0, // Top-left
+ 1,
+ 0, // Top-right
+ 1,
+ 1, // Bottom-right
+ 0,
+ 1, // Bottom-left
+ ]);
+
+ // Texture coordinates
+ const texCoords = new Float32Array([
+ 0,
+ 0, // Top-left
+ 1,
+ 0, // Top-right
+ 1,
+ 1, // Bottom-right
+ 0,
+ 1, // Bottom-left
+ ]);
+
+ // Indices for two triangles
+ const indices = new Uint16Array([0, 1, 2, 0, 2, 3]);
+
+ gl.bindVertexArray(this.#vao);
+
+ // Position attribute
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(this.#a_position.location);
+ gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0);
+
+ // TexCoord attribute
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.#texCoordBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, texCoords, gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(this.#a_texCoord.location);
+ gl.vertexAttribPointer(this.#a_texCoord.location, 2, gl.FLOAT, false, 0, 0);
+
+ // Index buffer
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
+
+ gl.bindVertexArray(null);
+ }
+
+ render(
+ broadcast: Broadcast,
+ camera: Camera,
+ maxZ: number,
+ modifiers?: {
+ dragging?: boolean;
+ hovering?: boolean;
+ },
+ ) {
+ this.#program.use();
+
+ const gl = this.#canvas.gl;
+ const bounds = broadcast.bounds.peek();
+ const scale = broadcast.zoom.peek();
+
+ // Set projection matrix
+ this.#u_projection.set(camera.projection);
+
+ // Set bounds (x, y, width, height)
+ this.#u_bounds.set(bounds.position.x, bounds.position.y, bounds.size.x, bounds.size.y);
+
+ // Set depth based on z-index
+ const depth = camera.zToDepth(broadcast.position.peek().z, maxZ);
+ this.#u_depth.set(depth);
+
+ // Set radius for rounded corners
+ const radius = 12 * scale;
+ this.#u_radius.set(radius);
+
+ // Set size for SDF calculation
+ this.#u_size.set(bounds.size.x, bounds.size.y);
+
+ // Set opacity
+ let opacity = broadcast.opacity;
+ if (modifiers?.dragging) {
+ opacity *= 0.7;
+ }
+
+ this.#u_opacity.set(opacity);
+
+ // Set pre-computed opacity values
+ this.#u_frameOpacity.set(broadcast.video.frameOpacity);
+ this.#u_memeOpacity.set(broadcast.video.memeOpacity);
+
+ // Set flip flag
+ this.#u_flip.set(broadcast.video.flip.peek() ? 1 : 0);
+
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, broadcast.video.frameTexture);
+ this.#u_frameTexture.set(0);
+
+ // Bind avatar texture if available
+ gl.activeTexture(gl.TEXTURE1);
+ gl.bindTexture(gl.TEXTURE_2D, broadcast.video.avatarTexture);
+ this.#u_avatarTexture.set(1);
+ this.#u_avatarActive.set(broadcast.video.avatarSize ? 1 : 0);
+
+ // Bind meme texture if available
+ const memeTexture = broadcast.video.memeTexture;
+ const memeBounds = broadcast.video.memeBounds;
+
+ gl.activeTexture(gl.TEXTURE2);
+ gl.bindTexture(gl.TEXTURE_2D, memeTexture);
+ this.#u_memeTexture.set(2);
+
+ if (memeBounds) {
+ this.#u_memeBounds.set(memeBounds.position.x, memeBounds.position.y, memeBounds.size.x, memeBounds.size.y);
+ }
+
+ // Draw
+ gl.bindVertexArray(this.#vao);
+ gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
+ gl.bindVertexArray(null);
+ }
+
+ close() {
+ const gl = this.#canvas.gl;
+ gl.deleteVertexArray(this.#vao);
+ gl.deleteBuffer(this.#positionBuffer);
+ gl.deleteBuffer(this.#texCoordBuffer);
+ gl.deleteBuffer(this.#indexBuffer);
+ this.#program.cleanup();
+ }
+}
diff --git a/app/src/room/gl/camera.ts b/app/src/room/gl/camera.ts
new file mode 100644
index 00000000..e266cce3
--- /dev/null
+++ b/app/src/room/gl/camera.ts
@@ -0,0 +1,50 @@
+import { Vector } from "../geometry";
+
+export class Camera {
+ projection: Float32Array;
+
+ constructor() {
+ this.projection = new Float32Array(16);
+ }
+
+ // Create a 2D orthographic projection matrix
+ updateOrtho(viewport: Vector) {
+ const left = 0;
+ const right = viewport.x;
+ const bottom = viewport.y;
+ const top = 0;
+ const near = -100; // Allow some depth for z-index
+ const far = 100;
+
+ // Column-major order for WebGL
+ this.projection[0] = 2 / (right - left);
+ this.projection[1] = 0;
+ this.projection[2] = 0;
+ this.projection[3] = 0;
+
+ this.projection[4] = 0;
+ this.projection[5] = 2 / (top - bottom);
+ this.projection[6] = 0;
+ this.projection[7] = 0;
+
+ this.projection[8] = 0;
+ this.projection[9] = 0;
+ this.projection[10] = 2 / (near - far);
+ this.projection[11] = 0;
+
+ this.projection[12] = (left + right) / (left - right);
+ this.projection[13] = (bottom + top) / (bottom - top);
+ this.projection[14] = (near + far) / (near - far);
+ this.projection[15] = 1;
+ }
+
+ // Convert z-index to depth value
+ // Higher z-index = closer to camera (lower depth value for LEQUAL test)
+ zToDepth(z: number, maxZ: number): number {
+ // Normalize z-index to 0-1 range, then map to depth range
+ // We use a small range to keep everything mostly 2D
+ // Higher z-index maps to a value closer to 0 (less negative), i.e. nearer the camera
+ const normalized = maxZ > 0 ? z / maxZ : 0;
+ return -(1.0 - normalized) * 0.01; // Higher z = closer to 0 (front)
+ }
+}
diff --git a/app/src/room/gl/context.ts b/app/src/room/gl/context.ts
new file mode 100644
index 00000000..ff5e3ee5
--- /dev/null
+++ b/app/src/room/gl/context.ts
@@ -0,0 +1,44 @@
+import { Signal } from "@kixelated/signals";
+import { Vector } from "../geometry";
+
+export class GLContext {
+ gl: WebGL2RenderingContext;
+ canvas: HTMLCanvasElement;
+ viewport: Signal;
+
+ constructor(canvas: HTMLCanvasElement, viewport: Signal) {
+ const gl = canvas.getContext("webgl2", {
+ alpha: false,
+ antialias: true,
+ depth: true,
+ premultipliedAlpha: false,
+ });
+
+ if (!gl) {
+ throw new Error("WebGL2 not supported");
+ }
+
+ this.gl = gl;
+ this.canvas = canvas;
+ this.viewport = viewport;
+
+ // Enable depth testing for z-index ordering
+ gl.enable(gl.DEPTH_TEST);
+ gl.depthFunc(gl.LEQUAL);
+
+ // Enable blending for transparency
+ gl.enable(gl.BLEND);
+ gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
+ }
+
+ clear() {
+ const gl = this.gl;
+ gl.clearColor(0, 0, 0, 1);
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+ }
+
+ resize(width: number, height: number) {
+ const gl = this.gl;
+ gl.viewport(0, 0, width, height);
+ }
+}
diff --git a/app/src/room/gl/outline.ts b/app/src/room/gl/outline.ts
new file mode 100644
index 00000000..08a0eca4
--- /dev/null
+++ b/app/src/room/gl/outline.ts
@@ -0,0 +1,196 @@
+import type { Broadcast } from "../broadcast";
+import { Canvas } from "../canvas";
+import type { Camera } from "./camera";
+import { Attribute, Shader, Uniform1f, Uniform2f, Uniform3f, Uniform4f, UniformMatrix4fv } from "./shader";
+import outlineFragSource from "./shaders/outline.frag?raw";
+import outlineVertSource from "./shaders/outline.vert?raw";
+
+export class OutlineRenderer {
+ #canvas: Canvas;
+ #program: Shader;
+ #vao: WebGLVertexArrayObject;
+ #positionBuffer: WebGLBuffer;
+ #indexBuffer: WebGLBuffer;
+
+ // Typed uniforms
+ #u_projection: UniformMatrix4fv;
+ #u_bounds: Uniform4f;
+ #u_depth: Uniform1f;
+ #u_radius: Uniform1f;
+ #u_size: Uniform2f;
+ #u_opacity: Uniform1f;
+ #u_volume: Uniform1f;
+ #u_border: Uniform1f;
+ #u_color: Uniform3f;
+ #u_time: Uniform1f;
+ #u_finalAlpha: Uniform1f;
+
+ // Typed attributes
+ #a_position: Attribute;
+
+ constructor(canvas: Canvas) {
+ this.#canvas = canvas;
+ this.#program = new Shader(canvas.gl, outlineVertSource, outlineFragSource);
+
+ // Initialize typed uniforms
+ this.#u_projection = this.#program.createUniformMatrix4fv("u_projection");
+ this.#u_bounds = this.#program.createUniform4f("u_bounds");
+ this.#u_depth = this.#program.createUniform1f("u_depth");
+ this.#u_radius = this.#program.createUniform1f("u_radius");
+ this.#u_size = this.#program.createUniform2f("u_size");
+ this.#u_opacity = this.#program.createUniform1f("u_opacity");
+ this.#u_volume = this.#program.createUniform1f("u_volume");
+ this.#u_border = this.#program.createUniform1f("u_border");
+ this.#u_color = this.#program.createUniform3f("u_color");
+ this.#u_time = this.#program.createUniform1f("u_time");
+ this.#u_finalAlpha = this.#program.createUniform1f("u_finalAlpha");
+
+ // Initialize typed attributes
+ this.#a_position = this.#program.createAttribute("a_position");
+
+ const vao = this.#canvas.gl.createVertexArray();
+ if (!vao) throw new Error("Failed to create VAO");
+ this.#vao = vao;
+
+ const positionBuffer = this.#canvas.gl.createBuffer();
+ if (!positionBuffer) throw new Error("Failed to create position buffer");
+ this.#positionBuffer = positionBuffer;
+
+ const indexBuffer = this.#canvas.gl.createBuffer();
+ if (!indexBuffer) throw new Error("Failed to create index buffer");
+ this.#indexBuffer = indexBuffer;
+
+ this.#setupBuffers();
+ }
+
+ #setupBuffers() {
+ const gl = this.#canvas.gl;
+
+ // Quad vertices (0-1 range, will be scaled by bounds)
+ const positions = new Float32Array([
+ 0,
+ 0, // Top-left
+ 1,
+ 0, // Top-right
+ 1,
+ 1, // Bottom-right
+ 0,
+ 1, // Bottom-left
+ ]);
+
+ // Indices for two triangles
+ const indices = new Uint16Array([0, 1, 2, 0, 2, 3]);
+
+ gl.bindVertexArray(this.#vao);
+
+ // Position attribute
+ gl.bindBuffer(gl.ARRAY_BUFFER, this.#positionBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(this.#a_position.location);
+ gl.vertexAttribPointer(this.#a_position.location, 2, gl.FLOAT, false, 0, 0);
+
+ // Index buffer
+ gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.#indexBuffer);
+ gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
+
+ gl.bindVertexArray(null);
+ }
+
+ render(broadcast: Broadcast, camera: Camera, maxZ: number, now: DOMHighResTimeStamp) {
+ const gl = this.#canvas.gl;
+ const bounds = broadcast.bounds.peek();
+ const scale = broadcast.zoom.peek();
+ const volume = broadcast.audio.volume;
+
+ this.#program.use();
+
+ // Set time
+ this.#u_time.set(now);
+
+ // Set projection matrix
+ this.#u_projection.set(camera.projection);
+
+ // Border size (PADDING from old implementation)
+ const border = 12 * scale;
+
+ // Expand bounds to accommodate ripple and line width
+ // Line can extend: lineInset(2) + lineWidth(3) + aaWidth(2) + ripple beyond border
+ const maxExpansion = border * 1.5;
+
+ // Bounds need to include the border expansion plus ripple space
+ this.#u_bounds.set(
+ bounds.position.x - maxExpansion,
+ bounds.position.y - maxExpansion,
+ bounds.size.x + maxExpansion * 2,
+ bounds.size.y + maxExpansion * 2,
+ );
+
+ // Set depth - outline should be behind ALL videos
+ // Videos are in range -0.01 to 0 (based on z-index)
+ // Add a tiny offset to make outlines slightly further
+ const baseDepth = camera.zToDepth(broadcast.position.peek().z, maxZ);
+ const depth = baseDepth - 0.02; // More negative = further away, behind all videos
+ this.#u_depth.set(depth);
+
+ // Set radius for rounded corners
+ this.#u_radius.set(border);
+
+ // Set size for SDF calculation - this is the total quad size (video + 2*maxExpansion)
+ // The shader will calculate videoSize by subtracting 2*border from this
+ this.#u_size.set(bounds.size.x + maxExpansion * 2, bounds.size.y + maxExpansion * 2);
+
+ // Apply opacity based on volume and video online status
+ const opacity = Math.min(10 * volume, 1) * broadcast.opacity;
+ this.#u_opacity.set(opacity);
+
+ // Set volume (smoothed, from 0-1)
+ this.#u_volume.set(volume);
+
+ // Set border size
+ this.#u_border.set(border);
+
+ // Compute final alpha once in TypeScript instead of per pixel
+ const finalAlpha = 0.3 + volume * 0.4;
+ this.#u_finalAlpha.set(finalAlpha);
+
+ // Set color based on volume using HSL from old implementation
+ // hue = 180 + volume * 120
+ const hue = 180 + volume * 120;
+
+ // Convert HSL to RGB
+ const h = hue / 360;
+ const s = 0.8;
+ const l = 0.45;
+
+ const hueToRgb = (p: number, q: number, t: number) => {
+ if (t < 0) t += 1;
+ if (t > 1) t -= 1;
+ if (t < 1 / 6) return p + (q - p) * 6 * t;
+ if (t < 1 / 2) return q;
+ if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
+ return p;
+ };
+
+ // Convert HSL to RGB
+ const q = l < 0.5 ? l * (1 + s) : l + s - l * s;
+ const p = 2 * l - q;
+ const r = hueToRgb(p, q, h + 1 / 3);
+ const g = hueToRgb(p, q, h);
+ const b = hueToRgb(p, q, h - 1 / 3);
+
+ this.#u_color.set(r, g, b);
+
+ // Draw
+ gl.bindVertexArray(this.#vao);
+ gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
+ gl.bindVertexArray(null);
+ }
+
+ close() {
+ const gl = this.#canvas.gl;
+ gl.deleteVertexArray(this.#vao);
+ gl.deleteBuffer(this.#positionBuffer);
+ gl.deleteBuffer(this.#indexBuffer);
+ this.#program.cleanup();
+ }
+}
diff --git a/app/src/room/gl/shader.ts b/app/src/room/gl/shader.ts
new file mode 100644
index 00000000..b705af8d
--- /dev/null
+++ b/app/src/room/gl/shader.ts
@@ -0,0 +1,198 @@
+// Typed uniform wrappers
+export class Uniform1f {
+ #location: WebGLUniformLocation;
+ #gl: WebGL2RenderingContext;
+
+ constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+ this.#location = location;
+ this.#gl = gl;
+ }
+
+ set(value: number) {
+ this.#gl.uniform1f(this.#location, value);
+ }
+}
+
+export class Uniform2f {
+ #location: WebGLUniformLocation;
+ #gl: WebGL2RenderingContext;
+
+ constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+ this.#location = location;
+ this.#gl = gl;
+ }
+
+ set(x: number, y: number) {
+ this.#gl.uniform2f(this.#location, x, y);
+ }
+}
+
+export class Uniform3f {
+ #location: WebGLUniformLocation;
+ #gl: WebGL2RenderingContext;
+
+ constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+ this.#location = location;
+ this.#gl = gl;
+ }
+
+ set(x: number, y: number, z: number) {
+ this.#gl.uniform3f(this.#location, x, y, z);
+ }
+}
+
+export class Uniform4f {
+ #location: WebGLUniformLocation;
+ #gl: WebGL2RenderingContext;
+
+ constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+ this.#location = location;
+ this.#gl = gl;
+ }
+
+ set(x: number, y: number, z: number, w: number) {
+ this.#gl.uniform4f(this.#location, x, y, z, w);
+ }
+}
+
+export class Uniform1i {
+ #location: WebGLUniformLocation;
+ #gl: WebGL2RenderingContext;
+
+ constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+ this.#location = location;
+ this.#gl = gl;
+ }
+
+ set(value: number) {
+ this.#gl.uniform1i(this.#location, value);
+ }
+}
+
+export class UniformMatrix4fv {
+ #location: WebGLUniformLocation;
+ #gl: WebGL2RenderingContext;
+
+ constructor(location: WebGLUniformLocation, gl: WebGL2RenderingContext) {
+ this.#location = location;
+ this.#gl = gl;
+ }
+
+ set(value: Float32Array) {
+ this.#gl.uniformMatrix4fv(this.#location, false, value);
+ }
+}
+
+// Typed attribute wrapper
+export class Attribute {
+ readonly location: number;
+
+ constructor(location: number) {
+ this.location = location;
+ }
+}
+
+export class Shader {
+ gl: WebGL2RenderingContext;
+ program: WebGLProgram;
+
+ constructor(gl: WebGL2RenderingContext, vertexSource: string, fragmentSource: string) {
+ this.gl = gl;
+
+ const vertexShader = this.#compileShader(gl.VERTEX_SHADER, vertexSource);
+ const fragmentShader = this.#compileShader(gl.FRAGMENT_SHADER, fragmentSource);
+
+ const program = gl.createProgram();
+ if (!program) {
+ throw new Error("Failed to create shader program");
+ }
+
+ gl.attachShader(program, vertexShader);
+ gl.attachShader(program, fragmentShader);
+ gl.linkProgram(program);
+
+ if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+ const info = gl.getProgramInfoLog(program);
+ gl.deleteProgram(program);
+ throw new Error(`Shader program link failed: ${info}`);
+ }
+
+ this.program = program;
+
+ // Clean up shaders after linking
+ gl.deleteShader(vertexShader);
+ gl.deleteShader(fragmentShader);
+ }
+
+ #compileShader(type: number, source: string): WebGLShader {
+ const gl = this.gl;
+ const shader = gl.createShader(type);
+ if (!shader) {
+ throw new Error("Failed to create shader");
+ }
+
+ gl.shaderSource(shader, source);
+ gl.compileShader(shader);
+
+ if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+ const info = gl.getShaderInfoLog(shader);
+ gl.deleteShader(shader);
+ throw new Error(`Shader compilation failed: ${info}`);
+ }
+
+ return shader;
+ }
+
+ use() {
+ this.gl.useProgram(this.program);
+ }
+
+ #getUniform(name: string): WebGLUniformLocation {
+ const loc = this.gl.getUniformLocation(this.program, name);
+ if (!loc) {
+ throw new Error(`Uniform ${name} not found`);
+ }
+ return loc;
+ }
+
+ #getAttribute(name: string): number {
+ const loc = this.gl.getAttribLocation(this.program, name);
+ if (loc === -1) {
+ throw new Error(`Attribute ${name} not found`);
+ }
+ return loc;
+ }
+
+ // Typed wrapper factory methods
+ createUniform1f(name: string): Uniform1f {
+ return new Uniform1f(this.#getUniform(name), this.gl);
+ }
+
+ createUniform2f(name: string): Uniform2f {
+ return new Uniform2f(this.#getUniform(name), this.gl);
+ }
+
+ createUniform3f(name: string): Uniform3f {
+ return new Uniform3f(this.#getUniform(name), this.gl);
+ }
+
+ createUniform4f(name: string): Uniform4f {
+ return new Uniform4f(this.#getUniform(name), this.gl);
+ }
+
+ createUniform1i(name: string): Uniform1i {
+ return new Uniform1i(this.#getUniform(name), this.gl);
+ }
+
+ createUniformMatrix4fv(name: string): UniformMatrix4fv {
+ return new UniformMatrix4fv(this.#getUniform(name), this.gl);
+ }
+
+ createAttribute(name: string): Attribute {
+ return new Attribute(this.#getAttribute(name));
+ }
+
+ cleanup() {
+ this.gl.deleteProgram(this.program);
+ }
+}
diff --git a/app/src/room/gl/shaders/background.frag b/app/src/room/gl/shaders/background.frag
new file mode 100644
index 00000000..81e660e9
--- /dev/null
+++ b/app/src/room/gl/shaders/background.frag
@@ -0,0 +1,80 @@
+#version 300 es
+precision highp float;
+
+in vec2 v_pixel;
+out vec4 fragColor;
+
+uniform vec2 u_resolution;
+uniform float u_time;
+
+const float LINE_SPACING = 28.0;
+const float LINE_WIDTH = 4.0;
+const float WOBBLE_AMPLITUDE = 5.0;
+const float WOBBLE_SPEED = 0.0004;
+
+const float SEGMENT_WIDTH = 120.0; // Pixels per segment
+
+// Hash function for deterministic randomness
+float hash(float n) {
+ return fract(sin(n) * 43758.5453123);
+}
+
+// Convert HSL to RGB
+vec3 hsl2rgb(float h, float s, float l) {
+ float c = (1.0 - abs(2.0 * l - 1.0)) * s;
+ float x = c * (1.0 - abs(mod(h / 60.0, 2.0) - 1.0));
+ float m = l - c / 2.0;
+
+ vec3 rgb;
+ if (h < 60.0) rgb = vec3(c, x, 0.0);
+ else if (h < 120.0) rgb = vec3(x, c, 0.0);
+ else if (h < 180.0) rgb = vec3(0.0, c, x);
+ else if (h < 240.0) rgb = vec3(0.0, x, c);
+ else if (h < 300.0) rgb = vec3(x, 0.0, c);
+ else rgb = vec3(c, 0.0, x);
+
+ return rgb + m;
+}
+
+void main() {
+ // Work in simple horizontal line space - rotation happens in vertex shader
+ vec2 pos = v_pixel;
+
+ // Determine which horizontal segment we're in
+ float segmentIndex = floor(pos.x / SEGMENT_WIDTH);
+
+ // Determine which line this pixel belongs to
+ float lineIndex = floor(pos.y / LINE_SPACING);
+
+ // Get wobble offsets at segment boundaries
+ float seedStart = segmentIndex * 31.0 + lineIndex * 17.0;
+ float seedEnd = (segmentIndex + 1.0) * 31.0 + lineIndex * 17.0;
+
+ float wobbleStart = hash(seedStart) * WOBBLE_AMPLITUDE * 2.0 - WOBBLE_AMPLITUDE;
+ float wobbleEnd = hash(seedEnd) * WOBBLE_AMPLITUDE * 2.0 - WOBBLE_AMPLITUDE;
+
+ // Add animated wobble
+ wobbleStart += sin(u_time * WOBBLE_SPEED + lineIndex + segmentIndex) * WOBBLE_AMPLITUDE;
+ wobbleEnd += sin(u_time * WOBBLE_SPEED + lineIndex + segmentIndex + 1.0) * WOBBLE_AMPLITUDE;
+
+ // Interpolate wobble within segment
+ float segmentT = mod(pos.x, SEGMENT_WIDTH) / SEGMENT_WIDTH;
+ float wobble = mix(wobbleStart, wobbleEnd, segmentT);
+
+ // Calculate the line position (centered in the bucket)
+ float baseLineY = lineIndex * LINE_SPACING + LINE_SPACING * 0.5;
+ float lineY = baseLineY + wobble;
+
+ // Distance from this pixel to the line
+ float dist = abs(pos.y - lineY);
+
+ // Calculate line color
+ float hue = mod(lineIndex * 25.0 + u_time * 0.01, 360.0);
+ vec3 lineColor = hsl2rgb(hue, 0.75, 0.5);
+
+ // Anti-aliased lines with feathering
+ float lineAlpha = 1.0 - smoothstep(LINE_WIDTH * 0.25, LINE_WIDTH * 1.25, dist);
+ lineAlpha *= 0.5;
+
+ fragColor = vec4(lineColor * lineAlpha, lineAlpha);
+}
diff --git a/app/src/room/gl/shaders/background.vert b/app/src/room/gl/shaders/background.vert
new file mode 100644
index 00000000..5275585a
--- /dev/null
+++ b/app/src/room/gl/shaders/background.vert
@@ -0,0 +1,29 @@
+#version 300 es
+
+in vec2 a_position;
+
+out vec2 v_pixel;
+
+uniform vec2 u_resolution;
+
+void main() {
+ // Rotate the entire quad in clip space
+ float angle = -0.25;
+ float cosA = cos(angle);
+ float sinA = sin(angle);
+ vec2 rotatedPos = vec2(
+ a_position.x * cosA - a_position.y * sinA,
+ a_position.x * sinA + a_position.y * cosA
+ );
+
+ // Scale rotated quad to ensure it covers the viewport
+ // scale of 1.5 (> sqrt(2) ~= 1.414) ensures the rotated square still covers the original square
+ rotatedPos *= 1.5;
+
+ // Convert to pixel coordinates for fragment shader (unrotated logical space)
+ vec2 uv = a_position * 0.5 + 0.5;
+ v_pixel = uv * u_resolution;
+
+ // Place background far back in depth (z = 1.0 in clip space)
+ gl_Position = vec4(rotatedPos, 1.0, 1.0);
+}
diff --git a/app/src/room/gl/shaders/border.frag b/app/src/room/gl/shaders/border.frag
new file mode 100644
index 00000000..0ec03941
--- /dev/null
+++ b/app/src/room/gl/shaders/border.frag
@@ -0,0 +1,43 @@
+#version 300 es
+precision highp float;
+
+in vec2 v_pos;
+
+uniform float u_radius;
+uniform vec2 u_size;
+uniform float u_opacity;
+uniform float u_border; // Border size in pixels
+
+out vec4 fragColor;
+
+// Signed distance function for rounded rectangle
+float roundedBoxSDF(vec2 center, vec2 size, float radius) {
+ vec2 q = abs(center) - size + radius;
+ return min(max(q.x, q.y), 0.0) + length(max(q, 0.0)) - radius;
+}
+
+void main() {
+ // v_pos is 0-1 in the quad
+ // u_size is the total bounds size (video + border on each side)
+ // u_border is the border width
+
+ // Calculate position from center of the bounds
+ vec2 center = (v_pos - 0.5) * u_size;
+
+ // Outer edge of the entire thing (edge of black border)
+ float outerDist = roundedBoxSDF(center, u_size * 0.5, u_radius);
+
+ // Discard anything more than 1px outside the outer bounds (the 1px band is kept for antialiasing)
+ if (outerDist > 1.0) {
+ discard;
+ }
+
+ // Fill the entire area with black (no transparency gaps)
+ vec3 color = vec3(0.0);
+ float alpha = 1.0;
+
+ // Antialiasing only on the outer edge
+ float aa = 1.0 - smoothstep(-1.0, 0.0, outerDist);
+
+ fragColor = vec4(color, alpha * aa * u_opacity);
+}
diff --git a/app/src/room/gl/shaders/border.vert b/app/src/room/gl/shaders/border.vert
new file mode 100644
index 00000000..1f29aa27
--- /dev/null
+++ b/app/src/room/gl/shaders/border.vert
@@ -0,0 +1,20 @@
+#version 300 es
+precision highp float;
+
+in vec2 a_position;
+
+uniform mat4 u_projection;
+uniform vec4 u_bounds; // x, y, width, height
+uniform float u_depth;
+
+out vec2 v_pos; // Position within the quad (0-1)
+
+void main() {
+ // Scale and translate to bounds
+ vec2 pos = a_position * u_bounds.zw + u_bounds.xy;
+
+ // Apply projection
+ gl_Position = u_projection * vec4(pos, u_depth, 1.0);
+
+ v_pos = a_position;
+}
diff --git a/app/src/room/gl/shaders/broadcast.frag b/app/src/room/gl/shaders/broadcast.frag
new file mode 100644
index 00000000..eadbe3e2
--- /dev/null
+++ b/app/src/room/gl/shaders/broadcast.frag
@@ -0,0 +1,69 @@
+#version 300 es
+precision highp float;
+
+in vec2 v_texCoord;
+in vec2 v_pos;
+
+uniform sampler2D u_frameTexture;
+uniform sampler2D u_avatarTexture;
+uniform sampler2D u_memeTexture;
+uniform bool u_avatarActive;
+uniform bool u_flip; // Whether to flip the frame texture horizontally
+uniform float u_radius;
+uniform vec2 u_size;
+uniform float u_opacity;
+uniform float u_frameOpacity; // Pre-computed frame opacity (0-1)
+uniform float u_memeOpacity; // Pre-computed meme opacity (0-1)
+uniform vec4 u_memeBounds; // x, y, width, height in texture coordinates
+
+out vec4 fragColor;
+
+// Signed distance function for rounded rectangle
+float roundedBoxSDF(vec2 center, vec2 size, float radius) {
+ vec2 q = abs(center) - size + radius;
+ return min(max(q.x, q.y), 0.0) + length(max(q, 0.0)) - radius;
+}
+
+void main() {
+ // Calculate position from center
+ vec2 center = (v_pos - 0.5) * u_size;
+
+ // Calculate SDF for rounded corners
+ float dist = roundedBoxSDF(center, u_size * 0.5, u_radius);
+
+ // Discard pixels outside the rounded rectangle
+ if (dist > 0.0) {
+ discard;
+ }
+
+ // Smooth edge antialiasing
+ float alpha = 1.0 - smoothstep(-1.0, 0.0, dist);
+
+ // Calculate texture coordinates (flip horizontally if needed for frame)
+ vec2 frameTexCoord = u_flip ? vec2(1.0 - v_texCoord.x, v_texCoord.y) : v_texCoord;
+
+ // Sample textures using pre-computed opacity values
+ vec4 frameColor = u_frameOpacity > 0.0 ? texture(u_frameTexture, frameTexCoord) : vec4(0.0, 0.0, 0.0, 1.0);
+ vec4 avatarColor = u_avatarActive && u_frameOpacity < 1.0 ? texture(u_avatarTexture, v_texCoord) : vec4(0.0, 0.0, 0.0, 1.0);
+ vec4 baseColor = mix(avatarColor, frameColor, u_frameOpacity);
+
+ if (u_memeOpacity > 0.0) {
+ // Calculate the meme texture coordinates based on memeBounds
+ // memeBounds contains the x, y offset and width, height scaling
+ vec2 memeTexCoord = (v_texCoord - u_memeBounds.xy) / u_memeBounds.zw;
+
+ // Only sample if we're within the meme bounds
+ if (memeTexCoord.x >= 0.0 && memeTexCoord.x <= 1.0 &&
+ memeTexCoord.y >= 0.0 && memeTexCoord.y <= 1.0) {
+ vec4 memeColor = texture(u_memeTexture, memeTexCoord);
+
+ // Blend meme on top using alpha compositing
+ // The meme uses WebM+VP9 with alpha channel for transparency
+ float memeAlpha = memeColor.a * u_memeOpacity;
+ baseColor.rgb = mix(baseColor.rgb, memeColor.rgb, memeAlpha);
+ baseColor.a = max(baseColor.a, memeAlpha);
+ }
+ }
+
+ fragColor = vec4(baseColor.rgb, baseColor.a * alpha * u_opacity);
+}
diff --git a/app/src/room/gl/shaders/broadcast.vert b/app/src/room/gl/shaders/broadcast.vert
new file mode 100644
index 00000000..dcd10e9d
--- /dev/null
+++ b/app/src/room/gl/shaders/broadcast.vert
@@ -0,0 +1,22 @@
+#version 300 es
+
+in vec2 a_position;
+in vec2 a_texCoord;
+
+uniform mat4 u_projection;
+uniform vec4 u_bounds; // x, y, width, height
+uniform float u_depth;
+
+out vec2 v_texCoord;
+out vec2 v_pos; // Position within the quad (0-1)
+
+void main() {
+ // Scale and translate to bounds
+ vec2 pos = a_position * u_bounds.zw + u_bounds.xy;
+
+ // Apply projection
+ gl_Position = u_projection * vec4(pos, u_depth, 1.0);
+
+ v_texCoord = a_texCoord;
+ v_pos = a_position;
+}
diff --git a/app/src/room/gl/shaders/outline.frag b/app/src/room/gl/shaders/outline.frag
new file mode 100644
index 00000000..69f4a15c
--- /dev/null
+++ b/app/src/room/gl/shaders/outline.frag
@@ -0,0 +1,102 @@
+#version 300 es
+precision highp float;
+
+in vec2 v_pos;
+
+uniform float u_radius;
+uniform vec2 u_size;
+uniform float u_opacity;
+uniform float u_volume; // Audio volume 0-1 (smoothed)
+uniform float u_border; // Border size in pixels
+uniform vec3 u_color; // RGB color for the volume indicator
+uniform float u_time; // Time in seconds for animation
+uniform float u_finalAlpha; // Pre-computed final alpha (0.3 + volume * 0.4)
+
+out vec4 fragColor;
+
+// Signed distance function for rounded rectangle
+float roundedBoxSDF(vec2 center, vec2 size, float radius) {
+ vec2 q = abs(center) - size + radius;
+ return min(max(q.x, q.y), 0.0) + length(max(q, 0.0)) - radius;
+}
+
+void main() {
+ if (u_opacity <= 0.01) {
+ discard;
+ }
+
+ // v_pos is 0-1 in the quad
+ // u_size is the total quad size (video + maxExpansion on each side)
+ // u_border is the border width (black outline size)
+
+ // Calculate position from center of the bounds
+ vec2 center = (v_pos - 0.5) * u_size;
+
+ // The render bounds are expanded by 1.5x border, but we need to find the actual video size
+ // maxExpansion = border * 1.5, so video size = u_size - 2*maxExpansion = u_size - 3.0*border
+ vec2 videoSize = u_size - vec2(u_border * 3.0);
+
+ // Inner edge at video boundary
+ float videoDist = roundedBoxSDF(center, videoSize * 0.5, u_radius);
+
+ // Discard pixels inside the video area plus a 2px margin, so only the surrounding outline region remains.
+ if (videoDist <= 2.0) {
+ discard;
+ }
+
+ // Calculate angle around the perimeter for ripple effect
+ float angle = atan(center.y, center.x);
+
+ // Ripple effect using triangle wave (linear/jagged)
+ float rippleFreq = 8.0; // Number of ripples around the perimeter
+ float rippleSpeed = 1.5; // Slower animation
+ float rippleAmount = u_volume * u_border * 0.1; // Ripple intensity (10% of border - more subtle)
+
+ // Create ripple offset using triangle wave (sawtooth converted to triangle)
+ // This creates a linear back-and-forth motion instead of smooth sine
+ float phase = angle * rippleFreq + u_time * rippleSpeed;
+ float sawtooth = fract(phase / (2.0 * 3.14159265));
+ float triangle = abs(sawtooth * 2.0 - 1.0) * 2.0 - 1.0;
+ float ripple = triangle * rippleAmount;
+
+ // Base expansion from volume (0 to border)
+ float baseExpand = u_border * min(1.0, u_volume);
+
+ // Apply ripple to the expansion (can go beyond border slightly)
+ float totalExpand = baseExpand + ripple;
+
+ // Distance to the edge of the colored region
+ float colorDist = roundedBoxSDF(center, videoSize * 0.5 + totalExpand, u_radius);
+
+ // Line configuration (as percentage of border)
+ float lineInset = u_border * 0.42; // Push line inward to hide behind video frame edge
+ float lineWidth = u_border * 0.25; // Solid line width
+ float aaWidth = u_border * 0.17; // Anti-aliasing width on each side
+ float totalWidth = lineWidth + aaWidth;
+
+ // Discard if well outside the line region
+ if (colorDist > totalWidth || videoDist < -lineInset) {
+ discard;
+ }
+
+ // In the colored region
+ vec3 finalColor = u_color;
+
+ // Create a sharp line with AA on edges, inset from video boundary
+ float innerEdge = videoDist + lineInset; // Offset inward
+ float outerEdge = abs(colorDist);
+
+ // Fade in from the inset edge over aaWidth
+ float innerAA = smoothstep(0.0, aaWidth, innerEdge);
+
+ // Full opacity in the middle of the line
+ float lineMask = step(outerEdge, lineWidth);
+
+ // Fade out at the outer edge over aaWidth
+ float outerAA = smoothstep(lineWidth + aaWidth, lineWidth, outerEdge);
+
+ // Combine: AA at inner edge, full in middle, AA at outer edge
+ float aa = innerAA * mix(outerAA, 1.0, lineMask);
+
+ fragColor = vec4(finalColor, u_finalAlpha * aa * u_opacity);
+}
diff --git a/app/src/room/gl/shaders/outline.vert b/app/src/room/gl/shaders/outline.vert
new file mode 100644
index 00000000..0286f10e
--- /dev/null
+++ b/app/src/room/gl/shaders/outline.vert
@@ -0,0 +1,19 @@
+#version 300 es
+
+in vec2 a_position;
+
+uniform mat4 u_projection;
+uniform vec4 u_bounds; // x, y, width, height
+uniform float u_depth;
+
+out vec2 v_pos; // Position within the quad (0-1)
+
+void main() {
+ // Scale and translate to bounds
+ vec2 pos = a_position * u_bounds.zw + u_bounds.xy;
+
+ // Apply projection
+ gl_Position = u_projection * vec4(pos, u_depth, 1.0);
+
+ v_pos = a_position;
+}
diff --git a/app/src/room/index.ts b/app/src/room/index.ts
index 7fc55066..d13673ae 100644
--- a/app/src/room/index.ts
+++ b/app/src/room/index.ts
@@ -1,9 +1,11 @@
import { Publish, Watch } from "@kixelated/hang";
import * as Moq from "@kixelated/moq";
-import { Effect } from "@kixelated/signals";
+import { Effect, Signal } from "@kixelated/signals";
import Settings from "../settings";
+import { Broadcast } from "./broadcast";
import type { Canvas } from "./canvas";
import { Local } from "./local";
+import { Locator } from "./locator";
import { Space } from "./space";
export interface RoomProps {
@@ -24,6 +26,9 @@ export class Room {
// The physics space for the room.
space: Space;
+ #cameraBroadcast = new Signal | undefined>(undefined);
+ #shareBroadcast = new Signal | undefined>(undefined);
+
#signals = new Effect();
constructor(props: RoomProps) {
@@ -54,6 +59,30 @@ export class Room {
this.#signals.timer(() => {
this.space.sound?.tts.enabled.set(true);
}, 1000);
+
+ // Manage the locator for the camera broadcast
+ this.#signals.effect((effect) => {
+ const cameraBroadcast = effect.get(this.#cameraBroadcast);
+ if (!cameraBroadcast) return;
+
+ const locator = new Locator(cameraBroadcast);
+ effect.cleanup(() => locator.close());
+
+ // Auto-close after 8 seconds (7s visible + 1s fade transition)
+ effect.timer(() => locator.close(), 8000);
+ });
+
+ // Manage the locator for the share broadcast
+ this.#signals.effect((effect) => {
+ const shareBroadcast = effect.get(this.#shareBroadcast);
+ if (!shareBroadcast) return;
+
+ const locator = new Locator(shareBroadcast);
+ effect.cleanup(() => locator.close());
+
+ // Auto-close after 8 seconds (7s visible + 1s fade transition)
+ effect.timer(() => locator.close(), 8000);
+ });
}
async #run(announced: Moq.Announced) {
@@ -61,20 +90,27 @@ export class Room {
const update = await announced.next();
if (!update) break;
- let local: Publish.Broadcast | undefined;
if (update.path === this.local.camera.path.peek()) {
- local = this.local.camera;
- } else if (update.path === this.local.share.path.peek()) {
- local = this.local.share;
+ if (update.active) {
+ const broadcast = this.space.add(update.path, this.local.camera);
+ this.#cameraBroadcast.set(broadcast as Broadcast);
+ } else {
+ this.space.remove(update.path);
+ this.#cameraBroadcast.set(undefined);
+ }
+
+ continue;
}
- if (local) {
+ if (update.path === this.local.share.path.peek()) {
if (update.active) {
- this.space.add(update.path, local);
+ const broadcast = this.space.add(update.path, this.local.share);
+ this.#shareBroadcast.set(broadcast as Broadcast);
} else {
- // NOTE: We don't close local sources so we can toggle them.
this.space.remove(update.path);
+ this.#shareBroadcast.set(undefined);
}
+
continue;
}
diff --git a/app/src/room/local.ts b/app/src/room/local.ts
index 96c44d61..d14f17c7 100644
--- a/app/src/room/local.ts
+++ b/app/src/room/local.ts
@@ -208,18 +208,6 @@ export class Local {
});
*/
- // Say hi when the user joins
- this.#signals.effect((effect) => {
- const name = effect.get(Settings.account.name);
- if (!name) return;
-
- // This is enabled on join.
- const enabled = effect.get(this.camera.enabled);
- if (!enabled) return;
-
- this.sound.tts.joined(name);
- });
-
// Use the provided camera and screen broadcasts
this.camera.signals.effect((effect) => {
if (effect.get(this.camera.video.source) || effect.get(this.camera.audio.source)) {
diff --git a/app/src/room/locator.ts b/app/src/room/locator.ts
new file mode 100644
index 00000000..ff1209f9
--- /dev/null
+++ b/app/src/room/locator.ts
@@ -0,0 +1,105 @@
+import type { Publish } from "@kixelated/hang";
+import { Effect, Signal } from "@kixelated/signals";
+import * as DOM from "@kixelated/signals/dom";
+import type { Broadcast } from "./broadcast";
+import { Bounds, Vector } from "./geometry";
+
+export class Locator {
+ broadcast: Broadcast;
+ signals = new Effect();
+
+ #visible = new Signal(true);
+
+ constructor(broadcast: Broadcast) {
+ this.broadcast = broadcast;
+
+ this.signals.effect(this.#render.bind(this));
+
+ // Start fading out after 7 seconds
+ this.signals.timer(() => {
+ this.#visible.set(false);
+ }, 7000);
+ }
+
+ #render(effect: Effect) {
+ // Container for arrow and text
+ const root = DOM.create("div", {
+ className: "fixed pointer-events-none transition-opacity duration-1000 animate-throb",
+ });
+
+ // Arrow pointing down
+ const arrow = DOM.create("div", {
+ className: "absolute left-1/2 -translate-x-1/2",
+ });
+
+ // Triangle SVG for the arrow
+ const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg");
+ svg.setAttribute("width", "32");
+ svg.setAttribute("height", "32");
+ svg.setAttribute("viewBox", "0 0 32 32");
+
+ const path = document.createElementNS("http://www.w3.org/2000/svg", "path");
+ path.setAttribute("d", "M16 28 L8 16 L24 16 Z");
+ path.setAttribute("fill", "#FFD700");
+ path.setAttribute("stroke", "#000");
+ path.setAttribute("stroke-width", "2");
+
+ svg.appendChild(path);
+ arrow.appendChild(svg);
+ root.appendChild(arrow);
+
+ // "YOU" text above the arrow
+ const text = DOM.create("div", {
+ className:
+ "absolute left-1/2 -translate-x-1/2 -top-8 font-bold text-2xl text-[#FFD700] [text-shadow:_0_0_8px_rgb(0_0_0_/_80%),_2px_2px_4px_rgb(0_0_0)]",
+ textContent: "YOU",
+ });
+ root.appendChild(text);
+
+ // Update the position based on broadcast bounds
+ const updatePosition = (bounds: Bounds, viewport: Vector) => {
+ const canvasRect = this.broadcast.canvas.element.getBoundingClientRect();
+
+ // Scale bounds from canvas coordinates to page coordinates
+ const scaleX = canvasRect.width / viewport.x;
+ const scaleY = canvasRect.height / viewport.y;
+
+ // Calculate position above the broadcast
+ const x = (bounds.position.x + bounds.size.x / 2) * scaleX + canvasRect.left;
+ const y = bounds.position.y * scaleY + canvasRect.top;
+
+ // Position the locator, with some gap above the broadcast
+ const gap = 60; // Distance above the broadcast
+ const top = Math.max(canvasRect.top, y - gap);
+ const left = Math.min(Math.max(canvasRect.left, x), canvasRect.left + canvasRect.width);
+
+ root.style.left = `${left}px`;
+ root.style.top = `${top}px`;
+ };
+
+ // Update position when bounds or viewport change
+ effect.effect((effect) => {
+ const bounds = effect.get(this.broadcast.bounds);
+ const viewport = effect.get(this.broadcast.canvas.viewport);
+ updatePosition(bounds, viewport);
+ });
+
+ // Set z-index based on broadcast z-index
+ effect.effect((effect) => {
+ const z = effect.get(this.broadcast.position).z;
+ root.style.zIndex = `${100 + z}`;
+ });
+
+ // Control opacity based on visible signal
+ effect.effect((effect) => {
+ const visible = effect.get(this.#visible);
+ root.style.opacity = visible ? "1" : "0";
+ });
+
+ DOM.render(effect, document.body, root);
+ }
+
+ close() {
+ this.signals.close();
+ }
+}
diff --git a/app/src/room/meme.ts b/app/src/room/meme.ts
index 518489f7..01e3de3f 100644
--- a/app/src/room/meme.ts
+++ b/app/src/room/meme.ts
@@ -1,9 +1,13 @@
-export type MemeAudio = {
+type MemeAudioSource = {
file: string;
emoji: string;
+
+ // These are not used for audio, but are defined to make it easier to use the same type for both.
+ fit?: "contain";
+ position?: string;
};
-export type MemeVideo = {
+type MemeVideoSource = {
file: string;
// CSS object-fit: how the video fits within its container
// - "contain": scales to fit entirely within container (may have letterbox/pillarbox)
@@ -17,6 +21,18 @@ export type MemeVideo = {
position?: string;
};
+export interface MemeVideo {
+ source: MemeVideoSource;
+ element: HTMLVideoElement;
+}
+
+export interface MemeAudio {
+ source: MemeAudioSource;
+ element: HTMLAudioElement;
+}
+
+export type Meme = MemeVideo | MemeAudio;
+
export const MEME_AUDIO = {
"among-us": { file: "among-us.mp3", emoji: "📮" },
aww: { file: "aww.mp3", emoji: "🥺" },
@@ -55,7 +71,7 @@ export const MEME_AUDIO = {
boom: { file: "boom.mp3", emoji: "💥" },
wow: { file: "wow.mp3", emoji: "😮" },
yay: { file: "yay.mp3", emoji: "🎉" },
-} as const satisfies Record;
+} as const satisfies Record;
export const MEME_VIDEO = {
"another-one": { file: "another-one.webm", fit: "cover", position: "bottom" },
@@ -92,7 +108,7 @@ export const MEME_VIDEO = {
"real-estate": { file: "real-estate.webm", fit: "cover", position: "center" }, // fit height, left/right can letterbox
waw: { file: "waw.webm", fit: "cover", position: "bottom" },
zzz: { file: "zzz.webm", fit: "cover", position: "center" },
-} as const satisfies Record;
+} as const satisfies Record;
export type MemeAudioName = keyof typeof MEME_AUDIO;
export type MemeVideoName = keyof typeof MEME_VIDEO;
diff --git a/app/src/room/name.ts b/app/src/room/name.ts
new file mode 100644
index 00000000..b9e339f0
--- /dev/null
+++ b/app/src/room/name.ts
@@ -0,0 +1,122 @@
+import { Effect, Signal } from "@kixelated/signals";
+import * as DOM from "@kixelated/signals/dom";
+import type { Broadcast } from "./broadcast";
+import type { Canvas } from "./canvas";
+import { Bounds, Vector } from "./geometry";
+
+export class Name {
+ canvas: Canvas;
+ broadcast: Broadcast;
+
+ signals = new Effect();
+
+ #hovering = new Signal(false);
+ #profile = new Signal(false);
+
+ constructor(broadcast: Broadcast, canvas: Canvas) {
+ this.broadcast = broadcast;
+ this.canvas = canvas;
+
+ this.signals.effect(this.#render.bind(this));
+ }
+
+ setHovering(hovering: boolean) {
+ this.#hovering.set(hovering);
+ }
+
+ setProfile(profile: boolean) {
+ this.#profile.set(profile);
+ }
+
+ #render(effect: Effect) {
+ const root = DOM.create("div", {
+ className:
+ "fixed pointer-events-none transition-opacity duration-200 text-white font-bold [text-shadow:_-1px_-1px_0_#000,_1px_-1px_0_#000,_-1px_1px_0_#000,_1px_1px_0_#000] overflow-hidden text-ellipsis whitespace-nowrap select-none",
+ });
+
+ // Update the position of the name when the broadcast bounds or viewport changes
+ const updatePosition = (bounds: Bounds, viewport: Vector) => {
+ // Get the canvas element's position on the page
+ const canvasRect = this.canvas.element.getBoundingClientRect();
+
+ // Scale bounds from canvas coordinates to page coordinates
+ const scaleX = canvasRect.width / viewport.x;
+ const scaleY = canvasRect.height / viewport.y;
+
+ // Transform bounds to page coordinates
+ const pageBounds = {
+ x: bounds.position.x * scaleX + canvasRect.left,
+ y: bounds.position.y * scaleY + canvasRect.top,
+ width: bounds.size.x * scaleX,
+ };
+
+ // Position name at top-left of broadcast with offset
+ const fontSize = 12;
+ const offset = 12;
+
+ // Clamp position to stay within canvas bounds
+ const left = Math.round(
+ Math.max(canvasRect.left + offset, Math.min(pageBounds.x + offset, canvasRect.right - offset)),
+ );
+ const top = Math.round(
+ Math.max(
+ canvasRect.top + offset,
+ Math.min(pageBounds.y + offset, canvasRect.bottom - fontSize - offset),
+ ),
+ );
+
+ root.style.left = `${left}px`;
+ root.style.top = `${top}px`;
+ root.style.fontSize = `${fontSize}px`;
+
+ // Max width should be constrained by both broadcast width and canvas bounds
+ const maxWidthFromBroadcast = Math.max(0, pageBounds.width - 2 * offset);
+ const maxWidthFromCanvas = Math.max(0, canvasRect.right - left - offset);
+ root.style.maxWidth = `${Math.min(maxWidthFromBroadcast, maxWidthFromCanvas)}px`;
+ };
+
+ // Update name text
+ effect.effect((effect) => {
+ const name = effect.get(this.broadcast.source.user.name);
+ root.textContent = name || "";
+ });
+
+ // Update position when bounds, viewport, or zoom change
+ effect.effect((effect) => {
+ const bounds = effect.get(this.broadcast.bounds);
+ const viewport = effect.get(this.broadcast.canvas.viewport);
+ updatePosition(bounds, viewport);
+ });
+
+ // Update position when window scrolls
+ effect.event(
+ window,
+ "scroll",
+ () => {
+ const bounds = this.broadcast.bounds.peek();
+ const viewport = this.broadcast.canvas.viewport.peek();
+ updatePosition(bounds, viewport);
+ },
+ { passive: true },
+ );
+
+ // Update z-index based on broadcast position
+ effect.effect((effect) => {
+ const z = effect.get(this.broadcast.position).z;
+ root.style.zIndex = `${100 + z}`;
+ });
+
+ // Control opacity based on hovering or profile mode
+ effect.effect((effect) => {
+ const hovering = effect.get(this.#hovering);
+ const profile = effect.get(this.#profile);
+ root.style.opacity = hovering || profile ? "1" : "0";
+ });
+
+ DOM.render(effect, document.body, root);
+ }
+
+ close() {
+ this.signals.close();
+ }
+}
diff --git a/app/src/room/sound.ts b/app/src/room/sound.ts
index 6bc6b78e..3a8bd331 100644
--- a/app/src/room/sound.ts
+++ b/app/src/room/sound.ts
@@ -132,7 +132,15 @@ export class Sound {
}
}
-import { MEME_AUDIO, MEME_AUDIO_LOOKUP, MEME_VIDEO, MEME_VIDEO_LOOKUP, MemeAudioName, MemeVideoName } from "./meme";
+import {
+ MEME_AUDIO,
+ MEME_AUDIO_LOOKUP,
+ MEME_VIDEO,
+ MEME_VIDEO_LOOKUP,
+ Meme,
+ MemeAudioName,
+ MemeVideoName,
+} from "./meme";
export class PannedNotifications {
#parent: Sound;
@@ -183,7 +191,7 @@ export class PannedNotifications {
// NOTE: We don't cache elements because the browser will.
// Otherwise it would be a pain in the butt to manage if the same meme is played simultaneously.
- meme(name: string): HTMLAudioElement | HTMLVideoElement | undefined {
+ meme(name: string): Meme | undefined {
// Make the name lowercase and remove hyphens for lookup
const lower = name.toLowerCase();
const lookupKey = lower.replace(/-/g, "");
@@ -192,13 +200,13 @@ export class PannedNotifications {
const videoKey = MEME_VIDEO_LOOKUP[lookupKey] || (lower as MemeVideoName);
const audioKey = MEME_AUDIO_LOOKUP[lookupKey] || (lower as MemeAudioName);
- const videoData = MEME_VIDEO[videoKey];
- const audioData = MEME_AUDIO[audioKey];
+ const videoSource = MEME_VIDEO[videoKey];
+ const audioSource = MEME_AUDIO[audioKey];
// Use the video if it's available
- if (videoData) {
+ if (videoSource) {
const video = document.createElement("video") as HTMLVideoElement;
- video.src = `/meme/${videoData.file}`;
+ video.src = `/meme/${videoSource.file}`;
if (this.#parent.suspended.peek()) {
video.muted = true; // so we can start loading
@@ -210,14 +218,14 @@ export class PannedNotifications {
video.autoplay = true;
video.load();
video.play();
- return video;
+ return { element: video, source: videoSource };
}
- if (audioData) {
- const audio = new Audio(`/meme/${audioData.file}`);
+ if (audioSource) {
+ const audio = new Audio(`/meme/${audioSource.file}`);
audio.autoplay = true;
audio.load();
- return audio;
+ return { element: audio, source: audioSource };
}
return undefined;
diff --git a/app/src/room/space.ts b/app/src/room/space.ts
index 33489ecd..844078da 100644
--- a/app/src/room/space.ts
+++ b/app/src/room/space.ts
@@ -3,6 +3,9 @@ import { Effect, Signal } from "@kixelated/signals";
import { Broadcast, BroadcastSource } from "./broadcast";
import type { Canvas } from "./canvas";
import { Vector } from "./geometry";
+import { BorderRenderer } from "./gl/border";
+import { BroadcastRenderer } from "./gl/broadcast";
+import { OutlineRenderer } from "./gl/outline";
import type { Sound } from "./sound";
export type SpaceProps = {
@@ -30,6 +33,11 @@ export class Space {
#maxZ = 0;
+ // WebGL renderers
+ #borderRenderer: BorderRenderer;
+ #outlineRenderer: OutlineRenderer;
+ #broadcastRenderer: BroadcastRenderer;
+
// Touch handling for mobile
#touches = new Map();
#pinchStartDistance = 0;
@@ -42,6 +50,11 @@ export class Space {
this.sound = sound;
this.profile = props?.profile ?? false;
+ // Initialize WebGL renderers
+ this.#borderRenderer = new BorderRenderer(canvas);
+ this.#outlineRenderer = new OutlineRenderer(canvas);
+ this.#broadcastRenderer = new BroadcastRenderer(canvas);
+
// Use the new eventListener helper that automatically handles cleanup
this.#signals.event(canvas.element, "mousedown", this.#onMouseDown.bind(this));
this.#signals.event(canvas.element, "mousemove", this.#onMouseMove.bind(this));
@@ -56,11 +69,17 @@ export class Space {
this.#signals.effect(this.#runScale.bind(this));
- // This is a bit of a hack, but register our render method.
- this.canvas.onRender = this.#tick.bind(this);
+ // Register tick and render methods separately
+ this.canvas.onRender = this.#render.bind(this);
this.#signals.cleanup(() => {
this.canvas.onRender = undefined;
});
+
+ // Run tick separately from render at 60fps
+ this.#signals.effect((effect) => {
+ const interval = setInterval(() => this.#tickAll(), 1000 / 60);
+ effect.cleanup(() => clearInterval(interval));
+ });
}
#onMouseDown(e: MouseEvent) {
@@ -96,7 +115,10 @@ export class Space {
this.#publishPosition(this.#dragging);
this.#dragging = undefined;
- this.#hovering = undefined;
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ this.#hovering = undefined;
+ }
document.body.style.cursor = "default";
}
}
@@ -116,13 +138,22 @@ export class Space {
const broadcast = this.#at(mouse);
if (broadcast) {
- this.#hovering = broadcast;
+ if (this.#hovering !== broadcast) {
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ }
+ this.#hovering = broadcast;
+ this.#hovering.name.setHovering(true);
+ }
if (!broadcast.locked()) {
document.body.style.cursor = "grab";
}
} else {
- this.#hovering = undefined;
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ this.#hovering = undefined;
+ }
document.body.style.cursor = "default";
}
}
@@ -132,7 +163,10 @@ export class Space {
this.#publishPosition(this.#dragging);
this.#dragging = undefined;
- this.#hovering = undefined;
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ this.#hovering = undefined;
+ }
document.body.style.cursor = "default";
}
}
@@ -149,7 +183,13 @@ export class Space {
return;
}
- this.#hovering = broadcast;
+ if (this.#hovering !== broadcast) {
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ }
+ this.#hovering = broadcast;
+ this.#hovering.name.setHovering(true);
+ }
// Bump the z-index unless we're already at the top.
broadcast.position.update((prev) => ({
@@ -324,7 +364,10 @@ export class Space {
if (this.#touches.size === 0 && this.#dragging) {
this.#publishPosition(this.#dragging);
this.#dragging = undefined;
- this.#hovering = undefined;
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ this.#hovering = undefined;
+ }
this.#pinchStartDistance = 0;
this.#pinchStartScale = 1;
}
@@ -360,7 +403,10 @@ export class Space {
if (this.#dragging) {
this.#publishPosition(this.#dragging);
this.#dragging = undefined;
- this.#hovering = undefined;
+ if (this.#hovering) {
+ this.#hovering.name.setHovering(false);
+ this.#hovering = undefined;
+ }
this.#pinchStartDistance = 0;
this.#pinchStartScale = 1;
}
@@ -392,6 +438,9 @@ export class Space {
z: ++this.#maxZ,
}));
+ // Set profile mode for the name display
+ broadcast.name.setProfile(this.profile);
+
if (this.lookup.has(id)) {
throw new Error(`broadcast already exists: ${id}`);
}
@@ -449,6 +498,8 @@ export class Space {
const name = effect.get(broadcast.source.user.name);
if (!name) return;
+ if (name.endsWith("(screen)")) return;
+
this.sound.tts.joined(name);
});
@@ -458,6 +509,8 @@ export class Space {
const name = effect.get(broadcast.source.user.name);
if (!name) return;
+ if (name.endsWith("(screen)")) return;
+
this.sound.tts.left(name);
});
@@ -470,6 +523,8 @@ export class Space {
throw new Error(`broadcast not found: ${path}`);
}
+ broadcast.setOnline(false);
+
this.lookup.delete(path);
// Move it from the main list to the rip list.
@@ -495,15 +550,18 @@ export class Space {
return all;
}
- #tick(ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) {
+ // Tick physics separately from rendering
+ #tickAll() {
+ const now = performance.now();
+
for (const broadcast of this.#rip) {
- broadcast.tick();
+ broadcast.tick(now);
}
const broadcasts = this.ordered.peek();
for (const broadcast of broadcasts) {
- broadcast.tick();
+ broadcast.tick(now);
}
// Check for collisions.
@@ -534,90 +592,53 @@ export class Space {
b.velocity = b.velocity.sub(force);
}
}
-
- this.#render(ctx, now);
}
- #render(ctx: CanvasRenderingContext2D, now: DOMHighResTimeStamp) {
- // Render the audio click prompt if audio is suspended
- if (this.sound.suspended.peek() && !this.profile) {
- this.#renderAudioPrompt(ctx);
- }
-
+ // Render using WebGL
+ #render(now: DOMHighResTimeStamp) {
const broadcasts = this.ordered.peek();
+
+ // Render in order: black borders (back) -> audio viz (middle) -> videos (front)
+ // This way audio viz shows through overlapping black borders
+
+ // 1. Render black borders (furthest back)
+ for (const broadcast of this.#rip) {
+ this.#borderRenderer.render(broadcast, this.canvas.camera, this.#maxZ);
+ }
for (const broadcast of broadcasts) {
- broadcast.audio.renderBackground(ctx);
+ this.#borderRenderer.render(broadcast, this.canvas.camera, this.#maxZ);
}
+ // 2. Render audio visualizations (middle layer)
+ for (const broadcast of this.#rip) {
+ this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now);
+ }
for (const broadcast of broadcasts) {
- broadcast.audio.render(ctx);
+ this.#outlineRenderer.render(broadcast, this.canvas.camera, this.#maxZ, now);
}
- // Broadcasts fading out don't have collision so they're in a separate structure.
+ // 3. Render video content (front layer)
for (const broadcast of this.#rip) {
- broadcast.video.render(now, ctx);
+ this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ);
}
+ // Render all broadcasts (except dragging)
for (const broadcast of broadcasts) {
if (this.#dragging !== broadcast) {
- ctx.save();
- broadcast.video.render(now, ctx, {
+ this.#broadcastRenderer.render(broadcast, this.canvas.camera, this.#maxZ, {
hovering: this.#hovering === broadcast || this.profile,
});
- ctx.restore();
}
}
- // Render the dragging broadcast last so it's always on top.
+ // Render the dragging broadcast last so it's always on top
if (this.#dragging) {
- ctx.save();
- ctx.fillStyle = "rgba(0, 0, 0, 0.5)";
- this.#dragging.video.render(now, ctx, { dragging: true });
- ctx.restore();
- }
-
- // Render the locator arrows for our broadcasts on join.
- for (const broadcast of broadcasts) {
- if (broadcast.source instanceof Publish.Broadcast) {
- broadcast.renderLocator(now, ctx);
- }
+ this.#broadcastRenderer.render(this.#dragging, this.canvas.camera, this.#maxZ, {
+ dragging: true,
+ });
}
}
- #renderAudioPrompt(ctx: CanvasRenderingContext2D) {
- ctx.save();
-
- // Use logical dimensions (CSS pixels)
- const width = ctx.canvas.width / window.devicePixelRatio;
- const padding = 30;
- const boxWidth = 400;
- const height = 80;
- const y = ctx.canvas.height / window.devicePixelRatio - height - padding;
- const x = (width - boxWidth) / 2;
- const borderRadius = 16;
-
- // Rounded rectangle with thick black border
- ctx.fillStyle = "rgba(0, 0, 0, 0.9)";
- ctx.beginPath();
- ctx.roundRect(x, y, boxWidth, height, borderRadius);
- ctx.fill();
-
- // Thick border
- ctx.strokeStyle = "rgba(0, 0, 0, 1)";
- ctx.lineWidth = 6;
- ctx.stroke();
-
- // Text
- const fontSize = Math.round(24); // round to avoid busting font caches
- ctx.font = `${fontSize}px sans-serif`;
- ctx.fillStyle = "rgba(255, 255, 255, 0.75)";
- ctx.textAlign = "center";
- ctx.textBaseline = "middle";
- ctx.fillText("🔊 Click to enable audio", width * 0.5, y + height / 2);
-
- ctx.restore();
- }
-
#runScale(effect: Effect) {
const broadcasts = effect.get(this.ordered);
if (broadcasts.length === 0) {
@@ -641,6 +662,10 @@ export class Space {
}
close() {
+ this.#borderRenderer.close();
+ this.#outlineRenderer.close();
+ this.#broadcastRenderer.close();
+
this.#signals.close();
for (const broadcast of this.ordered.peek()) {
diff --git a/app/src/room/video.ts b/app/src/room/video.ts
index f4ea7271..136444a5 100644
--- a/app/src/room/video.ts
+++ b/app/src/room/video.ts
@@ -1,12 +1,11 @@
import { Publish, Watch } from "@kixelated/hang";
import { Effect, Signal } from "@kixelated/signals";
import * as Api from "../api";
+import Settings from "../settings";
import type { Broadcast } from "./broadcast";
-import { FakeBroadcast } from "./fake";
-import { Vector } from "./geometry";
-import { MEME_AUDIO, MEME_AUDIO_LOOKUP, MEME_VIDEO, MEME_VIDEO_LOOKUP, type MemeVideoName } from "./meme";
+import { Bounds, Vector } from "./geometry";
-export type VideoSource = Watch.Video.Source | Publish.Video.Encoder;
+//export type VideoSource = Watch.Video.Source | Publish.Video.Encoder;
export class Video {
// We don't use the Video renderer that comes with hang because it assumes a single video source.
@@ -16,29 +15,93 @@ export class Video {
// The avatar image.
avatar = new Image();
- // 1 when a video frame is fully rendered, 0 when their avatar is fully rendered.
- avatarTransition = 0;
-
// The size of the avatar in pixels.
avatarSize = new Signal(undefined);
- // The current video frame.
- frame?: CanvasImageSource;
-
// The desired size of the video in pixels.
targetSize = new Signal(Vector.create(128, 128));
- // The opacity from 0 to 1, where 0 is offline and 1 is online.
- online = 0;
+ // Time-based transition tracking (in milliseconds)
+ #memeTransition: DOMHighResTimeStamp = 0; // When meme started appearing/disappearing
+ #frameTransition: DOMHighResTimeStamp = 0;
+ frameActive: boolean = false;
+
+ // Computed opacity values (calculated once per frame instead of per pixel)
+ frameOpacity: number = 0;
+ memeOpacity: number = 0;
+
+ // Signal that updates when meme video dimensions are loaded
+ #memeSize = new Signal(undefined);
+
+ // Cached meme bounds (x_offset, y_offset, width_scale, height_scale)
+ memeBounds?: Bounds;
+ memeActive: Signal = new Signal(false);
- #memeOpacity = 0;
- #nameOpacity = 0;
+ // WebGL textures for this broadcast
+ frameTexture: WebGLTexture; // Video texture
+ avatarTexture: WebGLTexture; // Avatar texture
+ memeTexture: WebGLTexture; // Meme texture
+ #gl: WebGL2RenderingContext;
+
+ // Render avatars and emojis at this size
+ #renderSize = new Signal(128);
+
+ // Whether to flip the video horizontally (for self-preview)
+ flip = new Signal(false);
constructor(broadcast: Broadcast) {
this.broadcast = broadcast;
+
+ this.#gl = broadcast.canvas.gl;
+
+ // Create the textures
+ this.frameTexture = this.#gl.createTexture();
+ this.avatarTexture = this.#gl.createTexture();
+ this.memeTexture = this.#gl.createTexture();
+
+ // Initialize textures with 1x1 transparent pixel to make them renderable
+ const emptyPixel = new Uint8Array([0, 0, 0, 0]);
+ for (const texture of [this.frameTexture, this.avatarTexture, this.memeTexture]) {
+ this.#gl.bindTexture(this.#gl.TEXTURE_2D, texture);
+ this.#gl.texImage2D(
+ this.#gl.TEXTURE_2D,
+ 0,
+ this.#gl.RGBA,
+ 1,
+ 1,
+ 0,
+ this.#gl.RGBA,
+ this.#gl.UNSIGNED_BYTE,
+ emptyPixel,
+ );
+ this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_WRAP_S, this.#gl.CLAMP_TO_EDGE);
+ this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_WRAP_T, this.#gl.CLAMP_TO_EDGE);
+ this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_MIN_FILTER, this.#gl.LINEAR);
+ this.#gl.texParameteri(this.#gl.TEXTURE_2D, this.#gl.TEXTURE_MAG_FILTER, this.#gl.LINEAR);
+ }
+ this.#gl.bindTexture(this.#gl.TEXTURE_2D, null);
+
+ // Set up texture upload effects
+ this.broadcast.signals.effect(this.#runFrame.bind(this));
+ this.broadcast.signals.effect(this.#runMeme.bind(this));
+ this.broadcast.signals.effect(this.#runMemeBounds.bind(this));
this.broadcast.signals.effect(this.#runAvatar.bind(this));
this.broadcast.signals.effect(this.#runTargetSize.bind(this));
- this.broadcast.signals.effect(this.#runFrame.bind(this));
+ this.broadcast.signals.effect(this.#runMemeTransition.bind(this));
+ this.broadcast.signals.effect(this.#runFlip.bind(this));
+
+ this.broadcast.signals.effect(this.#runRenderSize.bind(this));
+ }
+
+ #runFlip(effect: Effect) {
+ // Flipping is a mess because there's no way to encode a flipped frame, only to decode it flipped.
+ if (this.broadcast.source instanceof Publish.Broadcast) {
+ const flip = effect.get(this.broadcast.source.video.hd.config)?.flip ?? false;
+ this.flip.set(flip);
+ } else if (this.broadcast.source instanceof Watch.Broadcast) {
+ const flip = effect.get(this.broadcast.source.video.active)?.config.flip ?? false;
+ this.flip.set(flip);
+ }
}
#runAvatar(effect: Effect) {
@@ -53,14 +116,51 @@ export class Video {
// TODO only set the avatar if it successfully loads
const newAvatar = new Image();
+
+ // Enable CORS for external avatar images
+ newAvatar.crossOrigin = "anonymous";
+
+ // For SVGs, load at higher resolution to avoid pixelation:
+ // rasterize at the computed render size instead of the SVG's intrinsic size
+ if (avatar.endsWith(".svg")) {
+ const size = effect.get(this.#renderSize);
+ newAvatar.width = size;
+ newAvatar.height = size;
+ }
+
newAvatar.src = avatar;
- const load = () => {
- this.avatar = newAvatar;
- this.avatarSize.set(Vector.create(newAvatar.width, newAvatar.height));
- };
+ // Once the avatar loads, upload it to the texture
+ effect.event(newAvatar, "load", () => {
+ const avatarSize = Vector.create(
+ newAvatar.naturalWidth || newAvatar.width,
+ newAvatar.naturalHeight || newAvatar.height,
+ );
+ effect.set(this.avatarSize, avatarSize);
+
+ effect.effect((effect) => {
+ const size = effect.get(this.#renderSize);
+ this.#imageToTexture(newAvatar, this.avatarTexture, size);
+ });
+ });
+ }
- effect.event(newAvatar, "load", load);
+ #imageToTexture(src: HTMLImageElement, dst: WebGLTexture, size: number) {
+ const canvas = document.createElement("canvas");
+ canvas.width = size;
+ canvas.height = size;
+ const ctx = canvas.getContext("2d");
+ if (!ctx) throw new Error("Failed to get context");
+ ctx.drawImage(src, 0, 0, size, size);
+
+ const gl = this.#gl;
+ gl.bindTexture(gl.TEXTURE_2D, dst);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
+ gl.bindTexture(gl.TEXTURE_2D, null);
}
#runTargetSize(effect: Effect) {
@@ -89,268 +189,237 @@ export class Video {
}
#runFrame(effect: Effect) {
- if (this.broadcast.source instanceof FakeBroadcast) {
- // TODO FakeBroadcast should return a VideoFrame instead of a HTMLVideoElement.
- this.frame = effect.get(this.broadcast.source.video.frame);
- } else {
- const frame = effect.get(this.broadcast.source.video.frame)?.clone();
- effect.cleanup(() => frame?.close());
- this.frame = frame;
+ const frame = effect.get(this.broadcast.source.video.frame);
+
+ if (!!frame !== this.frameActive) {
+ this.#frameTransition = performance.now();
+ this.frameActive = !!frame;
}
+
+ if (frame) this.#frameToTexture(frame, this.frameTexture);
}
- tick() {
- if (this.frame) {
- this.avatarTransition = Math.min(this.avatarTransition + 0.05, 1);
- } else {
- this.avatarTransition = Math.max(this.avatarTransition - 0.05, 0);
+ #runMeme(effect: Effect) {
+ const meme = effect.get(this.broadcast.meme);
+ if (!meme) {
+ this.memeActive.set(false);
+ return;
}
- if (this.broadcast.visible.peek()) {
- this.online += (1 - this.online) * 0.1;
- } else {
- this.online += (0 - this.online) * 0.1;
- }
+ const element = meme.element;
- /*
- const ZOOM_SPEED = 0.005;
- this.#zoom = this.#zoom.lerp(this.#zoomTarget, ZOOM_SPEED);
- */
- }
+ effect.event(element, "ended", () => {
+ this.memeActive.set(false);
+ });
- // Try to avoid any mutations in this function; do it in tick instead.
- render(
- _now: DOMHighResTimeStamp,
- ctx: CanvasRenderingContext2D,
- modifiers?: {
- dragging?: boolean;
- hovering?: boolean;
- },
- ) {
- ctx.save();
-
- const bounds = this.broadcast.bounds.peek();
- const scale = this.broadcast.zoom.peek();
-
- ctx.translate(bounds.position.x, bounds.position.y);
- ctx.globalAlpha *= this.online;
- ctx.fillStyle = "#000";
-
- ctx.save();
-
- // Add a drop shadow
- ctx.shadowColor = "rgba(0, 0, 0, 1.0)";
- ctx.shadowBlur = 16 * scale;
- ctx.shadowOffsetX = 0;
- ctx.shadowOffsetY = 4 * scale;
-
- // Create a rounded rectangle path
- const radius = 12 * scale;
- const w = bounds.size.x;
- const h = bounds.size.y;
-
- ctx.beginPath();
- ctx.moveTo(radius, 0);
- ctx.lineTo(w - radius, 0);
- ctx.quadraticCurveTo(w, 0, w, radius);
- ctx.lineTo(w, h - radius);
- ctx.quadraticCurveTo(w, h, w - radius, h);
- ctx.lineTo(radius, h);
- ctx.quadraticCurveTo(0, h, 0, h - radius);
- ctx.lineTo(0, radius);
- ctx.quadraticCurveTo(0, 0, radius, 0);
- ctx.closePath();
-
- ctx.fillStyle = "#000"; // just needed to apply the shadow
- ctx.fill();
-
- ctx.shadowColor = "transparent";
-
- // Clip and draw the image
- ctx.clip();
-
- // Apply an opacity to the image.
- if (modifiers?.dragging) {
- ctx.globalAlpha *= 0.7;
- }
+ if (element instanceof HTMLVideoElement) {
+ this.#videoToTexture(effect, element, this.memeTexture);
- if (this.frame && this.avatarTransition > 0) {
- ctx.save();
- ctx.globalAlpha *= this.avatarTransition;
-
- // Apply horizontal flip when rendering the preview.
- const flip =
- this.broadcast.source instanceof Publish.Broadcast &&
- this.broadcast.source.video.hd.config.peek()?.flip;
-
- if (flip) {
- ctx.save();
- ctx.scale(-1, 1);
- ctx.translate(-bounds.size.x, 0);
- ctx.drawImage(this.frame, 0, 0, bounds.size.x, bounds.size.y);
- ctx.restore();
- } else {
- ctx.drawImage(this.frame, 0, 0, bounds.size.x, bounds.size.y);
+ // Listen for loadedmetadata event to update meme size when dimensions are available
+ const updateSize = () => {
+ if (element.videoWidth > 0 && element.videoHeight > 0) {
+ this.memeActive.set(true);
+ effect.set(this.#memeSize, Vector.create(element.videoWidth, element.videoHeight));
+ }
+ };
+
+ // Check if already loaded
+ if (element.readyState >= 1) {
+ updateSize();
}
- ctx.restore();
+
+ // Listen for metadata load
+ effect.event(element, "loadedmetadata", updateSize);
+ } else if ("emoji" in meme.source) {
+ const emoji = meme.source.emoji;
+
+ effect.effect((effect) => {
+ // Audio meme - render emoji to texture
+ const size = effect.get(this.#renderSize);
+ this.#emojiToTexture(emoji, size);
+ });
+
+ this.memeActive.set(true);
}
+ }
+
+ #runMemeTransition(effect: Effect) {
+ effect.get(this.memeActive);
+ this.#memeTransition = performance.now();
+ }
+
+ #runMemeBounds(effect: Effect) {
+ const meme = effect.get(this.broadcast.meme);
+ if (!meme) return;
+
+ // Wait until meme dimensions are available
+ const memeSize = effect.get(this.#memeSize);
+ if (!memeSize) return;
+
+ // Also react to bounds changes
+ const bounds = effect.get(this.broadcast.bounds);
+
+ const fit = meme.source.fit || "cover";
+ const position = meme.source.position || "center";
- if (this.avatarTransition < 1) {
- ctx.save();
- ctx.globalAlpha *= 1 - this.avatarTransition;
+ // Calculate meme bounds based on fit and position
+ const aspectRatio = memeSize.x / memeSize.y;
+ const boundsAspectRatio = bounds.size.x / bounds.size.y;
+ let width: number;
+ let height: number;
- if (this.avatar.complete) {
- ctx.drawImage(this.avatar, 0, 0, bounds.size.x, bounds.size.y);
+ if (fit === "contain") {
+ // Fit entire video within bounds
+ if (aspectRatio > boundsAspectRatio) {
+ width = 1.0;
+ height = boundsAspectRatio / aspectRatio;
} else {
- ctx.fillRect(0, 0, bounds.size.x, bounds.size.y);
+ height = 1.0;
+ width = aspectRatio / boundsAspectRatio;
+ }
+ } else {
+ // cover: fill the bounds (may crop)
+ if (aspectRatio > boundsAspectRatio) {
+ height = 1.0;
+ width = aspectRatio / boundsAspectRatio;
+ } else {
+ width = 1.0;
+ height = boundsAspectRatio / aspectRatio;
}
-
- ctx.restore();
}
- const meme = this.broadcast.meme.peek();
- if (meme) {
- if (meme.currentTime > 0) {
- ctx.save();
- ctx.globalAlpha *= this.#memeOpacity;
-
- if (meme instanceof HTMLVideoElement) {
- // Get the meme configuration
- const memeName = this.broadcast.memeName.peek();
- let fit: "contain" | "cover" = "cover"; // default
- let position = "center"; // default
-
- if (memeName) {
- // Remove hyphens for lookup if needed
- const lookupKey = memeName.toLowerCase().replace(/-/g, "");
- const memeKey = MEME_VIDEO_LOOKUP[lookupKey] || memeName;
- const memeData = MEME_VIDEO[memeKey as MemeVideoName];
- if (memeData) {
- fit = memeData.fit || "cover";
- position = memeData.position || "center";
- }
- }
-
- const aspectRatio = meme.videoWidth / meme.videoHeight;
- const boundsAspectRatio = bounds.size.x / bounds.size.y;
- let width: number;
- let height: number;
-
- if (fit === "contain") {
- // Fit entire video within bounds (may have letterbox/pillarbox)
- if (aspectRatio > boundsAspectRatio) {
- // Video is wider than bounds - fit by width
- width = bounds.size.x;
- height = width / aspectRatio;
- } else {
- // Video is taller than bounds - fit by height
- height = bounds.size.y;
- width = height * aspectRatio;
- }
- } else {
- // cover: fill the bounds (may crop)
- if (aspectRatio > boundsAspectRatio) {
- // Video is wider than bounds - use height to fill
- height = bounds.size.y;
- width = height * aspectRatio;
- } else {
- // Video is taller than bounds - use width to fill
- width = bounds.size.x;
- height = width / aspectRatio;
- }
- }
-
- // Parse position string (e.g., "center", "bottom", "bottom left", "50% 75%")
- let xPos = 0.5; // default center
- let yPos = 0.5; // default center
-
- const positionParts = position.toLowerCase().split(/\s+/);
- for (const part of positionParts) {
- if (part === "left") xPos = 0;
- else if (part === "right") xPos = 1;
- else if (part === "top") yPos = 0;
- else if (part === "bottom") yPos = 1;
- else if (part === "center") {
- // Keep defaults
- } else if (part.endsWith("%")) {
- const value = parseFloat(part) / 100;
- // Determine if this is x or y based on what we've seen
- if (positionParts.length === 1) {
- xPos = value; // Single value applies to x
- } else if (positionParts.indexOf(part) === 0) {
- xPos = value; // First value is x
- } else {
- yPos = value; // Second value is y
- }
- }
- }
-
- // Calculate position based on alignment
- const x = (bounds.size.x - width) * xPos;
- const y = (bounds.size.y - height) * yPos;
-
- // Add a pixel in each direction to account for any rounding errors.
- ctx.drawImage(meme, x - 1, y - 1, width + 2, height + 2);
+ // Parse position string
+ let xPos = 0.5;
+ let yPos = 0.5;
+
+ const positionParts = position.toLowerCase().split(/\s+/);
+ for (const part of positionParts) {
+ if (part === "left") xPos = 0;
+ else if (part === "right") xPos = 1;
+ else if (part === "top") yPos = 0;
+ else if (part === "bottom") yPos = 1;
+ else if (part === "center") {
+ // Keep defaults
+ } else if (part.endsWith("%")) {
+ const value = parseFloat(part) / 100;
+ if (positionParts.length === 1) {
+ xPos = value;
+ } else if (positionParts.indexOf(part) === 0) {
+ xPos = value;
} else {
- // Get the emoji for this audio meme
- const memeName = this.broadcast.memeName.peek();
- let emoji = "🔊"; // Default speaker emoji
-
- if (memeName) {
- // Remove hyphens for lookup if needed
- const lookupKey = memeName.toLowerCase().replace(/-/g, "");
- const memeKey = MEME_AUDIO_LOOKUP[lookupKey] || memeName;
- const memeData = MEME_AUDIO[memeKey as keyof typeof MEME_AUDIO];
- if (memeData) {
- emoji = memeData.emoji;
- }
- }
-
- const fontSize = Math.round(32 + 32 * scale); // round to avoid busting font caches
- // Draw the emoji for this audio meme
- ctx.font = `bold ${fontSize}px Arial`;
- ctx.fillStyle = "white";
- // Render it at the bottom center of the bounds.
- ctx.fillText(emoji, bounds.size.x / 2 - fontSize / 2, bounds.size.y - fontSize / 2);
+ yPos = value;
}
-
- ctx.restore();
}
+ }
- if (meme.ended || (meme.paused && meme.currentTime > 0)) {
- this.#memeOpacity += -this.#memeOpacity * 0.1;
- if (this.#memeOpacity <= 0) {
- this.broadcast.meme.set(undefined);
- this.broadcast.memeName.set(undefined);
- }
- } else {
- this.#memeOpacity += (1 - this.#memeOpacity) * 0.1;
+ // Calculate offset in texture coordinates (0-1 range)
+ this.memeBounds = new Bounds(
+ Vector.create((1.0 - width) * xPos, (1.0 - height) * yPos),
+ Vector.create(width, height),
+ );
+
+ effect.cleanup(() => {
+ this.memeBounds = undefined;
+ });
+ }
+
+ #frameToTexture(src: VideoFrame, dst: WebGLTexture) {
+ const gl = this.#gl;
+ gl.bindTexture(gl.TEXTURE_2D, dst);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, src);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.bindTexture(gl.TEXTURE_2D, null);
+ }
+
+ #videoToTexture(effect: Effect, src: HTMLVideoElement, dst: WebGLTexture) {
+ const gl = this.#gl;
+
+ let cancel: number;
+ const onFrame = () => {
+ gl.bindTexture(gl.TEXTURE_2D, dst);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, src);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.bindTexture(gl.TEXTURE_2D, null);
+
+ if (!src.paused && !src.ended) {
+ cancel = src.requestVideoFrameCallback(onFrame);
}
+ };
+
+ cancel = src.requestVideoFrameCallback(onFrame);
+
+ effect.cleanup(() => src.cancelVideoFrameCallback(cancel));
+ }
+
+ #emojiToTexture(emoji: string, size: number) {
+ const gl = this.#gl;
+
+ // Create offscreen canvas
+ const canvas = document.createElement("canvas");
+ canvas.width = size;
+ canvas.height = size;
+ const ctx = canvas.getContext("2d");
+ if (!ctx) throw new Error("Failed to get context");
+
+ // Render emoji centered
+ ctx.textAlign = "center";
+ ctx.textBaseline = "middle";
+ ctx.font = `${size * 0.5}px "Apple Color Emoji", "Segoe UI Emoji", "Noto Color Emoji", sans-serif`;
+ // Shift down slightly to compensate for emoji baseline issues
+ ctx.fillText(emoji, size / 2, size * 0.56);
+
+ // Upload to texture
+ gl.bindTexture(gl.TEXTURE_2D, this.memeTexture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.bindTexture(gl.TEXTURE_2D, null);
+
+ // Set meme size for bounds calculation
+ this.#memeSize.set(Vector.create(size, size));
+ }
+
+ #runRenderSize(effect: Effect) {
+ const scale = effect.get(Settings.render.scale);
+ const target = effect.get(this.broadcast.bounds).size;
+ const size = Math.sqrt(target.x * target.y) * scale;
+ // Round up to the next power of 2
+ const power = Math.ceil(Math.log2(size));
+ this.#renderSize.set(Math.min(2 ** power, 512 * scale));
+ }
+
+ // Update opacity values based on current time (called once per frame)
+ tick(now: DOMHighResTimeStamp) {
+ const TRANSITION_DURATION = 300; // ms
+
+ // Calculate frame opacity
+ const frameElapsed = now - this.#frameTransition;
+ if (this.frameActive) {
+ this.frameOpacity = Math.min(1, Math.max(0, frameElapsed / TRANSITION_DURATION));
+ } else {
+ this.frameOpacity = Math.max(0, 1 - frameElapsed / TRANSITION_DURATION);
}
- // Cancel the clip
- ctx.restore();
-
- // Render the display name when hovering.
- const targetOpacity = modifiers?.hovering ? 1 : 0;
- this.#nameOpacity += (targetOpacity - this.#nameOpacity) * 0.1;
-
- const name = this.broadcast.source.user.name.peek();
-
- if (this.#nameOpacity > 0 && name) {
- const fontSize = Math.round(Math.max(14 * scale, 10));
- ctx.save();
- ctx.globalAlpha *= this.#nameOpacity;
- ctx.font = `bold ${fontSize}px Arial`;
- ctx.fillStyle = "white";
- ctx.strokeStyle = "black";
- ctx.lineWidth = 2 * scale;
- const offset = 12 * scale;
- ctx.strokeText(name, offset, 2 * offset, bounds.size.x - 2 * offset);
- ctx.fillText(name, offset, 2 * offset, bounds.size.x - 2 * offset);
- ctx.restore();
+ // Calculate meme opacity
+ const memeElapsed = now - this.#memeTransition;
+ if (this.memeActive.peek()) {
+ this.memeOpacity = Math.min(1, Math.max(0, memeElapsed / TRANSITION_DURATION));
+ } else {
+ this.memeOpacity = Math.max(0, 1 - memeElapsed / TRANSITION_DURATION);
}
+ }
- ctx.restore();
+ close() {
+ this.#gl.deleteTexture(this.frameTexture);
+ this.#gl.deleteTexture(this.avatarTexture);
+ this.#gl.deleteTexture(this.memeTexture);
}
}
diff --git a/app/src/settings.tsx b/app/src/settings.tsx
index 1d8b357c..19ab0414 100644
--- a/app/src/settings.tsx
+++ b/app/src/settings.tsx
@@ -72,6 +72,22 @@ export const Settings = {
step: new Signal(Number.parseInt(localStorage.getItem("settings.tutorial.step") ?? "0", 10)),
},
+ // Rendering settings
+ render: {
+ scale: new Signal(
+ (() => {
+ const stored = localStorage.getItem("settings.render.scale");
+ if (stored) {
+ const parsed = Number.parseFloat(stored);
+ if (!Number.isNaN(parsed) && parsed > 0 && parsed <= window.devicePixelRatio) {
+ return parsed;
+ }
+ }
+ return window.devicePixelRatio;
+ })(),
+ ),
+ },
+
clear: () => {
localStorage.clear();
window.location.reload();
@@ -190,6 +206,10 @@ effect.subscribe(Settings.tutorial.step, (step) => {
localStorage.setItem("settings.tutorial.step", step.toString());
});
+effect.subscribe(Settings.render.scale, (ratio) => {
+ localStorage.setItem("settings.render.scale", ratio.toString());
+});
+
// Mostly just to avoid console warnings about signals not being closed
document.addEventListener("unload", () => {
effect.close();
@@ -201,6 +221,16 @@ export function Modal(props: { sound: Sound }): JSX.Element {
const draggable = solid(Settings.draggable);
const tts = createSelector(solid(Settings.audio.tts));
const webGPUSupported = supportsWebGPU();
+ const devicePixelRatio = solid(Settings.render.scale);
+ const maxDevicePixelRatio = window.devicePixelRatio;
+
+ // Calculate available pixel ratio options: 0.5x plus powers of 2 up to devicePixelRatio
+ const pixelRatioOptions: number[] = [0.5];
+ for (let i = 1; i <= maxDevicePixelRatio; i *= 2) {
+ pixelRatioOptions.push(i);
+ }
+
+ const isSelectedRatio = createSelector(() => devicePixelRatio());
const progress = solid(props.sound.tts.progress);
const [isGenerating, setIsGenerating] = createSignal(false);
@@ -227,7 +257,7 @@ export function Modal(props: { sound: Sound }): JSX.Element {
-
+
Announce Join/Leave
{tts("none") && "No voice announcements"}
@@ -261,7 +291,7 @@ export function Modal(props: { sound: Sound }): JSX.Element {
)}
-
+
+ {/* Device Pixel Ratio */}
+
+
+
+
+
+ Pixel Ratio
+ Increase for better quality, but worse performance.
+
+
+ {pixelRatioOptions.map((ratio) => (
+ Settings.render.scale.set(ratio)}
+ >
+ {ratio}x
+
+ ))}
+
+
-
+
Remote Control
Allow others to drag/resize your camera
@@ -350,7 +405,7 @@ export function Modal(props: { sound: Sound }): JSX.Element {
type="checkbox"
checked={draggable()}
onChange={() => Settings.draggable.update((p) => !p)}
- class="cursor-pointer accent-blue-500 group-hover:accent-blue-400 transition-colors flex-grow"
+ class="cursor-pointer accent-blue-500 group-hover:accent-blue-400 transition-colors w-18"
/>
diff --git a/app/src/tauri/update.ts b/app/src/tauri/update.ts
index 912dccf0..a76fad8b 100644
--- a/app/src/tauri/update.ts
+++ b/app/src/tauri/update.ts
@@ -4,6 +4,7 @@ import * as Updater from "@tauri-apps/plugin-updater";
// VERY important that this doesn't throw an error
async function check() {
try {
+ console.log("checking for update");
const update = await Updater.check({ allowDowngrades: true });
if (!update) return;
diff --git a/app/vite.config.ts b/app/vite.config.ts
index e4c46e38..210ea336 100644
--- a/app/vite.config.ts
+++ b/app/vite.config.ts
@@ -5,7 +5,7 @@ import solid from "vite-plugin-solid";
import { viteStaticCopy } from "vite-plugin-static-copy";
// https://vitejs.dev/config/
-export default defineConfig(({ mode }) => {
+export default defineConfig(() => {
return {
define: {
TAURI: JSON.stringify(!!process.env.TAURI_ENV_PLATFORM),
@@ -28,6 +28,8 @@ export default defineConfig(({ mode }) => {
format: "es" as const,
},
+ assetsInclude: ["**/*.glsl", "**/*.vert", "**/*.frag"],
+
plugins: [
solid(),
tailwindcss(),
diff --git a/flake.nix b/flake.nix
index 85f875b8..0b70782d 100644
--- a/flake.nix
+++ b/flake.nix
@@ -33,6 +33,9 @@
# Icon generation tools
imagemagick
libicns # provides png2icns
+
+ # Shader validation
+ glslang
];
};
diff --git a/justfile b/justfile
index 7f18cecf..15d7ce39 100644
--- a/justfile
+++ b/justfile
@@ -53,10 +53,10 @@ dev:
native:
cd native && just dev
-# Open the Android Studio project
-android:
- cd native && just android
+# Run the Android build, using --open to open Android Studio
+android *args:
+ cd native && just android {{args}}
-# Open the Xcode project
-ios:
- cd native && just ios
+# Run the iOS build, using --open to open Xcode
+ios *args:
+ cd native && just ios {{args}}
diff --git a/moq b/moq
index 0a6101da..1223704c 160000
--- a/moq
+++ b/moq
@@ -1 +1 @@
-Subproject commit 0a6101daae9e363a99f9507c683502aa684d811c
+Subproject commit 1223704cfaff96852c8c58ab466af17f15486745
diff --git a/native/gen/apple/hang.xcodeproj/project.pbxproj b/native/gen/apple/hang.xcodeproj/project.pbxproj
index a3990e36..6590e642 100644
--- a/native/gen/apple/hang.xcodeproj/project.pbxproj
+++ b/native/gen/apple/hang.xcodeproj/project.pbxproj
@@ -232,8 +232,8 @@
"$(SRCROOT)/Externals/arm64/${CONFIGURATION}/libapp.a",
);
runOnlyForDeploymentPostprocessing = 0;
- shellPath = /bin/sh;
- shellScript = "bun tauri ios xcode-script -v --platform ${PLATFORM_DISPLAY_NAME:?} --sdk-root ${SDKROOT:?} --framework-search-paths \"${FRAMEWORK_SEARCH_PATHS:?}\" --header-search-paths \"${HEADER_SEARCH_PATHS:?}\" --gcc-preprocessor-definitions \"${GCC_PREPROCESSOR_DEFINITIONS:-}\" --configuration ${CONFIGURATION:?} ${FORCE_COLOR} ${ARCHS:?}\n";
+ shellPath = /bin/zsh;
+ shellScript = "export PATH=\"$HOME/.bun/bin:$PATH\"\neval \"$(/opt/homebrew/bin/brew shellenv)\"\nbun tauri ios xcode-script -v --platform ${PLATFORM_DISPLAY_NAME:?} --sdk-root ${SDKROOT:?} --framework-search-paths \"${FRAMEWORK_SEARCH_PATHS:?}\" --header-search-paths \"${HEADER_SEARCH_PATHS:?}\" --gcc-preprocessor-definitions \"${GCC_PREPROCESSOR_DEFINITIONS:-}\" --configuration ${CONFIGURATION:?} ${FORCE_COLOR} ${ARCHS:?}\n";
};
/* End PBXShellScriptBuildPhase section */
@@ -313,8 +313,10 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_ENTITLEMENTS = hang_iOS/hang_iOS.entitlements;
CODE_SIGN_IDENTITY = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
CODE_SIGN_STYLE = Manual;
- "DEVELOPMENT_TEAM[sdk=iphoneos*]" = "D7D5SDDB5Z";
+ DEVELOPMENT_TEAM = D7D5SDDB5Z;
+ "DEVELOPMENT_TEAM[sdk=iphoneos*]" = D7D5SDDB5Z;
ENABLE_BITCODE = NO;
"EXCLUDED_ARCHS[sdk=iphoneos*]" = x86_64;
FRAMEWORK_SEARCH_PATHS = (
@@ -342,18 +344,13 @@
);
PRODUCT_BUNDLE_IDENTIFIER = live.hang;
PRODUCT_NAME = "hang";
+ PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" = "3a83435e-25c5-4801-ba3d-e7f438c554da";
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = arm64;
- DEVELOPMENT_TEAM = "D7D5SDDB5Z";
- "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
- PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
};
- PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
- "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
name = release;
- PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
};
A981A291F13CD61156C96A03 /* debug */ = {
isa = XCBuildConfiguration;
@@ -426,8 +423,10 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_ENTITLEMENTS = hang_iOS/hang_iOS.entitlements;
CODE_SIGN_IDENTITY = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
CODE_SIGN_STYLE = Manual;
- "DEVELOPMENT_TEAM[sdk=iphoneos*]" = "D7D5SDDB5Z";
+ DEVELOPMENT_TEAM = D7D5SDDB5Z;
+ "DEVELOPMENT_TEAM[sdk=iphoneos*]" = D7D5SDDB5Z;
ENABLE_BITCODE = NO;
"EXCLUDED_ARCHS[sdk=iphoneos*]" = x86_64;
FRAMEWORK_SEARCH_PATHS = (
@@ -455,18 +454,13 @@
);
PRODUCT_BUNDLE_IDENTIFIER = live.hang;
PRODUCT_NAME = "hang";
+ PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" = "3a83435e-25c5-4801-ba3d-e7f438c554da";
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = arm64;
- DEVELOPMENT_TEAM = "D7D5SDDB5Z";
- "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
- PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
};
- PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
- "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Distribution: Luke Curley (D7D5SDDB5Z)";
name = debug;
- PROVISIONING_PROFILE_SPECIFIER = "3a83435e-25c5-4801-ba3d-e7f438c554da";
};
/* End XCBuildConfiguration section */
diff --git a/native/gen/apple/hang_iOS/Info.plist b/native/gen/apple/hang_iOS/Info.plist
index 93009027..547a368b 100644
--- a/native/gen/apple/hang_iOS/Info.plist
+++ b/native/gen/apple/hang_iOS/Info.plist
@@ -44,5 +44,7 @@
Request camera access
NSMicrophoneUsageDescription
Request microphone access
+
ITSAppUsesNonExemptEncryption
+
\ No newline at end of file
diff --git a/native/justfile b/native/justfile
index c7c37f18..3996f6fa 100644
--- a/native/justfile
+++ b/native/justfile
@@ -26,11 +26,11 @@ deploy platform:
dev:
bun tauri dev
-android:
- bun tauri android dev --host --open
+android *args:
+ bun tauri android dev --host {{args}}
-ios:
- bun tauri ios dev --host
+ios *args:
+ bun tauri ios dev --host {{args}}
# Generate Tauri icons from browser-generated ZIP files
# Prerequisites: Download all ZIP files from http://localhost:1420/dev/icons to this directory
diff --git a/native/src/lib.rs b/native/src/lib.rs
index 428cce59..db426ace 100644
--- a/native/src/lib.rs
+++ b/native/src/lib.rs
@@ -1,4 +1,4 @@
-use tauri::{Emitter, Manager};
+use tauri::Emitter;
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
@@ -57,6 +57,7 @@ async fn start_server(window: tauri::Window) -> Result
{
// Focus the main window
#[cfg(desktop)]
{
+ use tauri::Manager;
let _ = window.get_webview_window("main").expect("no main window").set_focus();
}
})