diff --git a/docs/app/(home)/components/Navbar.tsx b/docs/app/(home)/components/Navbar.tsx index 21db1bce8..9d9f2624c 100644 --- a/docs/app/(home)/components/Navbar.tsx +++ b/docs/app/(home)/components/Navbar.tsx @@ -17,11 +17,9 @@ import { BUTTON_SHADOW } from "./shared"; // Constants // --------------------------------------------------------------------------- -const NAV_TABS = ["Introduction", "OpenUI Lang", "Chat", "Playground", "API Reference"] as const; +const NAV_TABS = ["OpenUI Lang", "Playground", "API Reference"] as const; const TAB_URLS: Record = { - Introduction: "/docs/introduction", "OpenUI Lang": "/docs/openui-lang", - Chat: "/docs/chat", Playground: "/playground", "API Reference": "/docs/api-reference", }; diff --git a/docs/app/api/chat/route.ts b/docs/app/api/chat/route.ts new file mode 100644 index 000000000..4fdd33a0f --- /dev/null +++ b/docs/app/api/chat/route.ts @@ -0,0 +1,297 @@ +import { NextRequest } from "next/server"; +import OpenAI from "openai"; +import type { ChatCompletionMessageParam } from "openai/resources/chat/completions.mjs"; + +// ── Tool implementations ── + +function getWeather({ location }: { location: string }): Promise { + return new Promise((resolve) => { + setTimeout(() => { + const knownTemps: Record = { + tokyo: 22, "san francisco": 18, london: 14, "new york": 25, + paris: 19, sydney: 27, mumbai: 33, berlin: 16, + }; + const conditions = ["Sunny", "Partly Cloudy", "Cloudy", "Light Rain", "Clear Skies"]; + const temp = knownTemps[location.toLowerCase()] ?? 
Math.floor(Math.random() * 30 + 5); + const condition = conditions[Math.floor(Math.random() * conditions.length)]; + resolve(JSON.stringify({ + location, temperature_celsius: temp, + temperature_fahrenheit: Math.round(temp * 1.8 + 32), + condition, + humidity_percent: Math.floor(Math.random() * 40 + 40), + wind_speed_kmh: Math.floor(Math.random() * 25 + 5), + forecast: [ + { day: "Tomorrow", high: temp + 2, low: temp - 4, condition: "Partly Cloudy" }, + { day: "Day After", high: temp + 1, low: temp - 3, condition: "Sunny" }, + ], + })); + }, 800); + }); +} + +function getStockPrice({ symbol }: { symbol: string }): Promise { + return new Promise((resolve) => { + setTimeout(() => { + const s = symbol.toUpperCase(); + const knownPrices: Record = { + AAPL: 189.84, GOOGL: 141.8, TSLA: 248.42, MSFT: 378.91, + AMZN: 178.25, NVDA: 875.28, META: 485.58, + }; + const price = knownPrices[s] ?? Math.floor(Math.random() * 500 + 20); + const change = parseFloat((Math.random() * 8 - 4).toFixed(2)); + resolve(JSON.stringify({ + symbol: s, + price: parseFloat((price + change).toFixed(2)), + change, change_percent: parseFloat(((change / price) * 100).toFixed(2)), + volume: `${(Math.random() * 50 + 10).toFixed(1)}M`, + day_high: parseFloat((price + Math.abs(change) + 1.5).toFixed(2)), + day_low: parseFloat((price - Math.abs(change) - 1.2).toFixed(2)), + })); + }, 600); + }); +} + +function calculate({ expression }: { expression: string }): Promise { + return new Promise((resolve) => { + setTimeout(() => { + try { + const sanitized = expression.replace(/[^0-9+\-*/().%\s,Math.sqrtpowabsceilfloorround]/g, ""); + const result = new Function(`return (${sanitized})`)(); + resolve(JSON.stringify({ expression, result: Number(result) })); + } catch { + resolve(JSON.stringify({ expression, error: "Invalid expression" })); + } + }, 300); + }); +} + +function searchWeb({ query }: { query: string }): Promise { + return new Promise((resolve) => { + setTimeout(() => { + resolve(JSON.stringify({ + 
query, + results: [ + { title: `Top result for "${query}"`, snippet: `Comprehensive overview of ${query} with the latest information.` }, + { title: `${query} - Latest News`, snippet: `Recent developments and updates related to ${query}.` }, + { title: `Understanding ${query}`, snippet: `An in-depth guide explaining everything about ${query}.` }, + ], + })); + }, 1000); + }); +} + +// ── Tool definitions ── + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const tools: any[] = [ + { + type: "function", + function: { + name: "get_weather", + description: "Get current weather for a location.", + parameters: { + type: "object", + properties: { location: { type: "string", description: "City name" } }, + required: ["location"], + }, + function: getWeather, + parse: JSON.parse, + }, + }, + { + type: "function", + function: { + name: "get_stock_price", + description: "Get stock price for a ticker symbol.", + parameters: { + type: "object", + properties: { symbol: { type: "string", description: "Ticker symbol, e.g. 
AAPL" } }, + required: ["symbol"], + }, + function: getStockPrice, + parse: JSON.parse, + }, + }, + { + type: "function", + function: { + name: "calculate", + description: "Evaluate a math expression.", + parameters: { + type: "object", + properties: { expression: { type: "string", description: "Math expression to evaluate" } }, + required: ["expression"], + }, + function: calculate, + parse: JSON.parse, + }, + }, + { + type: "function", + function: { + name: "search_web", + description: "Search the web for information.", + parameters: { + type: "object", + properties: { query: { type: "string", description: "Search query" } }, + required: ["query"], + }, + function: searchWeb, + parse: JSON.parse, + }, + }, +]; + +// ── SSE helpers ── + +function sseToolCallStart( + encoder: TextEncoder, + tc: { id: string; function: { name: string } }, + index: number, +) { + return encoder.encode( + `data: ${JSON.stringify({ + id: `chatcmpl-tc-${tc.id}`, + object: "chat.completion.chunk", + choices: [{ + index: 0, + delta: { + tool_calls: [{ index, id: tc.id, type: "function", function: { name: tc.function.name, arguments: "" } }], + }, + finish_reason: null, + }], + })}\n\n`, + ); +} + +function sseToolCallArgs( + encoder: TextEncoder, + tc: { id: string; function: { arguments: string } }, + result: string, + index: number, +) { + let enrichedArgs: string; + try { + enrichedArgs = JSON.stringify({ _request: JSON.parse(tc.function.arguments), _response: JSON.parse(result) }); + } catch { + enrichedArgs = tc.function.arguments; + } + return encoder.encode( + `data: ${JSON.stringify({ + id: `chatcmpl-tc-${tc.id}-args`, + object: "chat.completion.chunk", + choices: [{ + index: 0, + delta: { tool_calls: [{ index, function: { arguments: enrichedArgs } }] }, + finish_reason: null, + }], + })}\n\n`, + ); +} + +// ── Route handler ── + +export async function POST(req: NextRequest) { + const { messages, systemPrompt } = await req.json(); + + const client = new OpenAI({ + apiKey: 
process.env.OPENROUTER_API_KEY, + baseURL: "https://openrouter.ai/api/v1", + }); + const MODEL = "openai/gpt-5.4"; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const cleanMessages = (messages as any[]) + .filter((m) => m.role !== "tool") + .map((m) => { + if (m.role === "assistant" && m.tool_calls?.length) { + const { tool_calls: _tc, ...rest } = m; // eslint-disable-line @typescript-eslint/no-unused-vars + return rest; + } + return m; + }); + + const chatMessages: ChatCompletionMessageParam[] = [ + ...(systemPrompt ? [{ role: "system" as const, content: systemPrompt }] : []), + ...cleanMessages, + ]; + + const encoder = new TextEncoder(); + let controllerClosed = false; + + const readable = new ReadableStream({ + start(controller) { + const enqueue = (data: Uint8Array) => { + if (controllerClosed) return; + try { controller.enqueue(data); } catch { /* already closed */ } + }; + const close = () => { + if (controllerClosed) return; + controllerClosed = true; + try { controller.close(); } catch { /* already closed */ } + }; + + const pendingCalls: Array<{ id: string; name: string; arguments: string }> = []; + let callIdx = 0; + let resultIdx = 0; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const runner = (client.chat.completions as any).runTools({ + model: MODEL, + messages: chatMessages, + tools, + stream: true + }); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + runner.on("functionToolCall", (fc: any) => { + const id = `tc-${callIdx}`; + pendingCalls.push({ id, name: fc.name, arguments: fc.arguments }); + enqueue(sseToolCallStart(encoder, { id, function: { name: fc.name } }, callIdx)); + callIdx++; + }); + + runner.on("functionToolCallResult", (result: string) => { + const tc = pendingCalls[resultIdx]; + if (tc) { + enqueue(sseToolCallArgs(encoder, { id: tc.id, function: { arguments: tc.arguments } }, result, resultIdx)); + } + resultIdx++; + }); + + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + runner.on("chunk", (chunk: any) => { + const choice = chunk.choices?.[0]; + const delta = choice?.delta; + if (!delta) return; + if (delta.content) { + enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`)); + } + if (choice?.finish_reason === "stop") { + enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`)); + } + }); + + runner.on("end", () => { + enqueue(encoder.encode("data: [DONE]\n\n")); + close(); + }); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + runner.on("error", (err: any) => { + const msg = err instanceof Error ? err.message : "Stream error"; + console.error("Chat route error:", msg); + enqueue(encoder.encode(`data: ${JSON.stringify({ error: msg })}\n\n`)); + close(); + }); + }, + }); + + return new Response(readable, { + headers: { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache, no-transform", + Connection: "keep-alive", + }, + }); +} diff --git a/docs/app/docs/[[...slug]]/page.tsx b/docs/app/docs/[[...slug]]/page.tsx index 356857356..77b887bb9 100644 --- a/docs/app/docs/[[...slug]]/page.tsx +++ b/docs/app/docs/[[...slug]]/page.tsx @@ -13,23 +13,18 @@ export default async function Page(props: PageProps<"/docs/[[...slug]]">) { if (!page) notFound(); const MDX = page.data.body; - const isIntroduction = page.slugs[0] === "introduction"; return ( - {!isIntroduction && ( - <> - {page.data.title} - {page.data.description} -
- - -
- - )} + {page.data.title} + {page.data.description} +
+ + +
* { - grid-area: main !important; -} diff --git a/docs/app/docs/introduction/page.tsx b/docs/app/docs/introduction/page.tsx deleted file mode 100644 index 630bdcbbf..000000000 --- a/docs/app/docs/introduction/page.tsx +++ /dev/null @@ -1,11 +0,0 @@ -import "@/app/docs/introduction/page.module.css"; -import { OverviewPage } from "@/components/overview-components/overview-page"; - -export const metadata = { - title: "Introduction", - description: "OpenUI is a comprehensive toolkit for building LLM-powered user interfaces.", -}; - -export default function IntroductionPage() { - return ; -} diff --git a/docs/app/docs/openui-lang/page.tsx b/docs/app/docs/openui-lang/page.tsx deleted file mode 100644 index 0c1cd6abc..000000000 --- a/docs/app/docs/openui-lang/page.tsx +++ /dev/null @@ -1,454 +0,0 @@ -"use client"; - -import { Button } from "@/components/button"; -import { CodeBlock, InlineButton, Separator, SimpleCard } from "@/components/overview-components"; -import { - ArrowRight, - BarChart3, - Code2, - FileText, - MessageSquare, - Shield, - Waves, - Zap, -} from "lucide-react"; -import { - FeatureCard, - FeatureCards, -} from "@/components/overview-components"; -import Link from "next/link"; -import { useState } from "react"; - -const steps = [ - { - title: "Define Library", - description: "Create your component library with Zod schemas and generate the system prompt", - code: `import { defineComponent, createLibrary } from '@openuidev/react-lang'; -import { z } from 'zod'; - -const Card = defineComponent({ - name: 'Card', - description: 'Displays a titled content card.', - props: z.object({ - title: z.string(), - }), - component: ({ props }) =>
{props.title}
, -}); - -export const library = createLibrary({ - components: [Card, ...otherComponents], -}); - -export const systemPrompt = library.prompt(); // Generated system prompt -`, - }, - { - title: "LLM Generates OpenUI Syntax", - description: "LLM outputs token-efficient, line-oriented syntax", - code: `root = Stack([welcomeCard]) -welcomeCard = Card([welcomeHeader, welcomeBody], "card") -welcomeHeader = CardHeader("Welcome", "Get started with our platform") -welcomeBody = Stack([signupForm], "column", "m") -signupForm = Form("signup", [nameField, emailField], actions) -nameField = FormControl("Name", Input("name", "Your name", "text", ["required", "minLength:2"])) -emailField = FormControl("Email", Input("email", "you@example.com", "email", ["required", "email"])) -actions = Buttons([signUpBtn, learnMoreBtn], "row") -signUpBtn = Button("Sign up", "submit:signup", "primary") -learnMoreBtn = Button("Learn more", "action:learn_more", "secondary") -`, - }, -]; - -export default function OpenUILangOverview() { - const [activeStep, setActiveStep] = useState(0); - - return ( -
- {/* Introduction */} -
-
-
-

OpenUI Lang

-

- A line-oriented language designed for streaming, token efficiency, and type safety -

-
-
- -

- An alternative to{" "} - - Vercel JSON renderer - {" "} - and{" "} - - A2UI - {" "} - that uses up to 67.1% fewer tokens than equivalent JSON structures. Define your component library - with Zod schemas and parse LLM responses into renderable components. -

- -
-
-
- - - - {/* Key Features */} -
-

Key Features

- - - } - title="Streaming Native" - description="Line-oriented syntax means the UI renders line-by-line. No waiting for valid JSON closing braces." - /> - } - title="Token Efficient" - description="Uses up to 67.1% fewer tokens than equivalent JSON structures, significantly reducing inference cost and latency." - /> - } - title="Hallucination Resistant" - description="Strictly typed against your Zod schemas. If the generated code does not match your definition, it does not render." - /> - -
- - - - {/* Comparison */} -
-

JSON vs OpenUI Lang

-

- Compare the same UI component in both formats -

- -
-
-
-

JSON Format

- - ~849 tokens - -
- -
- -
-
-

OpenUI Lang

- - ~294 tokens - -
- -
-
-
- - - - {/* How It Works */} -
-

How It Works

-

- Click through each step to see the complete workflow -

- -
- {steps.map((step, index) => ( - - ))} -
- - -
-
-
- {activeStep + 1} -
-
-

{steps[activeStep].title}

-

- {steps[activeStep].description} -

-
-
-
- - - -
- setActiveStep(Math.max(0, activeStep - 1))} - disabled={activeStep === 0} - > - Previous - - setActiveStep(Math.min(steps.length - 1, activeStep + 1))} - disabled={activeStep === steps.length - 1} - > - Next Step - -
-
-
- - - - {/* Use Cases */} -
-

Use Cases

-

- Real-world applications where OpenUI Lang excels -

- - - } - title="Analytics Dashboards" - description="Generate complex data visualizations and metric cards from natural language queries." - /> - } - title="AI Chat Interfaces" - description="Stream UI components in real-time as the LLM generates responses." - /> - } - title="Dynamic Forms" - description="Build adaptive forms that change based on user input or context." - /> - -
-
- ); -} diff --git a/docs/app/docs/openui-lang/streaming-comparison.tsx b/docs/app/docs/openui-lang/streaming-comparison.tsx new file mode 100644 index 000000000..e4249be23 --- /dev/null +++ b/docs/app/docs/openui-lang/streaming-comparison.tsx @@ -0,0 +1,417 @@ +"use client"; + +import { useCallback, useEffect, useRef, useState } from "react"; +import { RotateCcw, Check } from "lucide-react"; + +const JSON_CODE = `{ + "component": { + "component": "Stack", + "props": { + "children": [ + { + "component": "TextContent", + "props": { + "text": "Contact Us", + "size": "large-heavy" + } + }, + { + "component": "Form", + "props": { + "name": "contact", + "fields": [ + { + "component": "FormControl", + "props": { + "label": "Name", + "input": { + "component": "Input", + "props": { + "name": "name", + "placeholder": "Your full name", + "type": "text", + "rules": [ + "required", + "minLength:2" + ] + } + } + } + }, + { + "component": "FormControl", + "props": { + "label": "Email", + "input": { + "component": "Input", + "props": { + "name": "email", + "placeholder": "you@example.com", + "type": "email", + "rules": [ + "required", + "email" + ] + } + } + } + }, + { + "component": "FormControl", + "props": { + "label": "Phone", + "input": { + "component": "Input", + "props": { + "name": "phone", + "placeholder": "e.g., +1 555 123 4567", + "type": "text", + "rules": [ + "required", + "minLength:7", + "maxLength:20" + ] + } + } + } + }, + { + "component": "FormControl", + "props": { + "label": "Subject", + "input": { + "component": "Select", + "props": { + "name": "subject", + "items": [ + { + "component": "SelectItem", + "props": { + "value": "general", + "label": "General inquiry" + } + }, + { + "component": "SelectItem", + "props": { + "value": "support", + "label": "Support" + } + }, + { + "component": "SelectItem", + "props": { + "value": "sales", + "label": "Sales" + } + }, + { + "component": "SelectItem", + "props": { + "value": "billing", + "label": "Billing" + } + }, + 
{ + "component": "SelectItem", + "props": { + "value": "feedback", + "label": "Feedback" + } + } + ], + "placeholder": "Select a subject...", + "rules": [ + "required" + ] + } + } + } + }, + { + "component": "FormControl", + "props": { + "label": "Message", + "input": { + "component": "TextArea", + "props": { + "name": "message", + "placeholder": "How can we help?", + "rows": 6, + "rules": [ + "required", + "minLength:10" + ] + } + } + } + } + ], + "buttons": { + "component": "Buttons", + "props": { + "buttons": [ + { + "component": "Button", + "props": { + "label": "Submit", + "action": "submit:contact", + "variant": "primary" + } + }, + { + "component": "Button", + "props": { + "label": "Cancel", + "action": "action:cancel_contact", + "variant": "secondary" + } + } + ], + "direction": "row" + } + } + } + } + ], + "direction": "column", + "gap": "l" + } + }, + "error": null +}`; + +const OPENUI_CODE = `root = Stack([title, form], "column", "l") +title = TextContent("Contact Us", "large-heavy") +form = Form("contact", [nameField, emailField, phoneField, subjectField, messageField], formButtons) +nameField = FormControl("Name", Input("name", "Your full name", "text", ["required", "minLength:2"])) +emailField = FormControl("Email", Input("email", "you@example.com", "email", ["required", "email"])) +phoneField = FormControl("Phone", Input("phone", "e.g., +1 555 123 4567", "text", ["required", "minLength:7", "maxLength:20"])) +subjectField = FormControl("Subject", Select("subject", subjectOptions, "Select a subject...", ["required"])) +messageField = FormControl("Message", TextArea("message", "How can we help?", 6, ["required", "minLength:10"])) +subjectOptions = [SelectItem("general", "General inquiry"), SelectItem("support", "Support"), SelectItem("sales", "Sales"), SelectItem("billing", "Billing"), SelectItem("feedback", "Feedback")] +formButtons = Buttons([submitBtn, cancelBtn], "row") +submitBtn = Button("Submit", "submit:contact", "primary") +cancelBtn = 
Button("Cancel", "action:cancel_contact", "secondary")`; + +// Calculated using openai tokenizer, https://platform.openai.com/tokenizer +const JSON_TOKENS = 849; +const OPENUI_TOKENS = 294; +const TOKEN_RATE = 60; // tokens per second + +const JSON_DURATION = JSON_TOKENS / TOKEN_RATE; // ~5.7s +const OPENUI_DURATION = OPENUI_TOKENS / TOKEN_RATE; // ~2.0s + +type StreamState = "idle" | "streaming" | "done"; + +function StreamingCodeBlock({ + code, + charCount, + totalTokens, + currentTokens, + elapsed, + totalDuration, + state, + label, + variant, +}: { + code: string; + charCount: number; + totalTokens: number; + currentTokens: number; + elapsed: number; + totalDuration: number; + state: StreamState; + label: string; + variant: "red" | "green"; +}) { + const preRef = useRef(null); + const displayedCode = code.slice(0, charCount); + const isDone = state === "done"; + + useEffect(() => { + if (preRef.current && state === "streaming") { + preRef.current.scrollTop = preRef.current.scrollHeight; + } + }, [charCount, state]); + + const pillBg = variant === "red" + ? "bg-red-50 text-red-600 dark:bg-red-900/20 dark:text-red-400" + : "bg-emerald-50 text-emerald-600 dark:bg-emerald-900/20 dark:text-emerald-400"; + + const doneBg = variant === "red" + ? "bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-300" + : "bg-emerald-100 text-emerald-700 dark:bg-emerald-900/30 dark:text-emerald-300"; + + return ( +
+
+

{label}

+
+ + {state === "streaming" + ? `${currentTokens}/${totalTokens} tokens` + : `${totalTokens} tokens`} + + {state === "streaming" && ( + + {elapsed.toFixed(1)}s + + )} + {isDone && ( + + {totalDuration.toFixed(1)}s + + )} +
+
+
+
+          
+            {displayedCode}
+            {state === "streaming" && (
+              
+            )}
+          
+        
+
+
+ ); +} + +export function StreamingComparison() { + const [state, setState] = useState("idle"); + const [jsonChars, setJsonChars] = useState(0); + const [openuiChars, setOpenuiChars] = useState(0); + const [jsonTokens, setJsonTokens] = useState(0); + const [openuiTokens, setOpenuiTokens] = useState(0); + const [elapsed, setElapsed] = useState(0); + const [jsonDone, setJsonDone] = useState(false); + const [openuiDone, setOpenuiDone] = useState(false); + const rafRef = useRef(0); + const startRef = useRef(0); + const sectionRef = useRef(null); + const hasAutoPlayed = useRef(false); + + const reset = useCallback(() => { + cancelAnimationFrame(rafRef.current); + setState("idle"); + setJsonChars(0); + setOpenuiChars(0); + setJsonTokens(0); + setOpenuiTokens(0); + setElapsed(0); + setJsonDone(false); + setOpenuiDone(false); + }, []); + + const play = useCallback(() => { + reset(); + setState("streaming"); + startRef.current = performance.now(); + + const tick = (now: number) => { + const dt = (now - startRef.current) / 1000; + setElapsed(dt); + + const jsonProgress = Math.min(1, dt / JSON_DURATION); + const openuiProgress = Math.min(1, dt / OPENUI_DURATION); + + setJsonChars(Math.floor(jsonProgress * JSON_CODE.length)); + setOpenuiChars(Math.floor(openuiProgress * OPENUI_CODE.length)); + setJsonTokens(Math.floor(jsonProgress * JSON_TOKENS)); + setOpenuiTokens(Math.floor(openuiProgress * OPENUI_TOKENS)); + + if (openuiProgress >= 1) setOpenuiDone(true); + if (jsonProgress >= 1) setJsonDone(true); + + if (jsonProgress < 1) { + rafRef.current = requestAnimationFrame(tick); + } else { + setState("done"); + } + }; + + rafRef.current = requestAnimationFrame(tick); + }, [reset]); + + // Auto-play when section scrolls into view + useEffect(() => { + const el = sectionRef.current; + if (!el) return; + + const observer = new IntersectionObserver( + ([entry]) => { + if (entry.isIntersecting && !hasAutoPlayed.current) { + hasAutoPlayed.current = true; + play(); + } + }, + { 
threshold: 0.3 }, + ); + + observer.observe(el); + return () => observer.disconnect(); + }, [play]); + + useEffect(() => { + return () => cancelAnimationFrame(rafRef.current); + }, []); + + return ( +
+

+ Same UI component, both streaming at {TOKEN_RATE} tokens/sec. OpenUI Lang finishes in{" "} + {OPENUI_DURATION.toFixed(1)}s vs JSON's{" "} + {JSON_DURATION.toFixed(1)}s —{" "} + {Math.round((1 - OPENUI_TOKENS / JSON_TOKENS) * 100)}% fewer tokens. +

+ +
+ + +
+ + {state === "done" && ( +
+

+ OpenUI Lang completed in {OPENUI_DURATION.toFixed(1)}s vs JSON's{" "} + {JSON_DURATION.toFixed(1)}s —{" "} + {(JSON_DURATION / OPENUI_DURATION).toFixed(1)}x faster with{" "} + {JSON_TOKENS - OPENUI_TOKENS} fewer tokens. +

+ +
+ )} +
+ ); +} diff --git a/docs/components/docs-navbar.tsx b/docs/components/docs-navbar.tsx index 61f41a8cf..2ac73f14b 100644 --- a/docs/components/docs-navbar.tsx +++ b/docs/components/docs-navbar.tsx @@ -11,7 +11,6 @@ import { OpenUILogo, ThesysLogo } from "./brand-logo"; import { ThemeToggle } from "./theme-toggle"; const tabs = [ - { title: "Introduction", url: "/docs/introduction" }, { title: "OpenUI Lang", url: "/docs/openui-lang" }, { title: "Chat", url: "/docs/chat" }, { title: "API Reference", url: "/docs/api-reference" }, @@ -77,7 +76,7 @@ export function DocsNavbar({ showSidebarToggle = false }: { showSidebarToggle?: return (
{/* Top row: logo left, actions right */} -
+
{showSidebarToggle && ( {tab.title} diff --git a/docs/components/docs-route-layout.tsx b/docs/components/docs-route-layout.tsx index d74316766..c77e22740 100644 --- a/docs/components/docs-route-layout.tsx +++ b/docs/components/docs-route-layout.tsx @@ -4,7 +4,6 @@ import { DocsNavbar } from "@/components/docs-navbar"; import { baseOptions, siteConfig } from "@/lib/layout.shared"; import { DocsLayout } from "fumadocs-ui/layouts/docs"; import Link from "next/link"; -import { usePathname } from "next/navigation"; import type { ReactNode } from "react"; function SidebarBannerLink({ @@ -39,18 +38,14 @@ type DocsRouteLayoutProps = { }; export function DocsRouteLayout({ tree, children }: DocsRouteLayoutProps) { - const pathname = usePathname(); - const isIntroductionRoute = - pathname === "/docs/introduction" || pathname.startsWith("/docs/introduction/"); - return ( }} + nav={{ component: }} sidebar={{ tabs: false, - enabled: !isIntroductionRoute, + enabled: true, collapsible: false, banner: (
diff --git a/docs/components/overview-components/chat-modal.css b/docs/components/overview-components/chat-modal.css new file mode 100644 index 000000000..c8f3df9e2 --- /dev/null +++ b/docs/components/overview-components/chat-modal.css @@ -0,0 +1,91 @@ +.chat-modal-overlay { + position: fixed; + inset: 0; + z-index: 200; + background: rgba(0, 0, 0, 0.5); + backdrop-filter: blur(4px); + display: flex; + align-items: center; + justify-content: center; + padding: 24px; + animation: chat-modal-overlay-in 0.15s ease; +} + +@keyframes chat-modal-overlay-in { + from { opacity: 0; } + to { opacity: 1; } +} + +.chat-modal-container { + position: relative; + width: min(1400px, 95vw); + height: 90vh; + border-radius: 16px; + overflow: hidden; + box-shadow: 0 25px 50px -12px rgba(0, 0, 0, 0.25); + animation: chat-modal-in 0.2s ease; +} + +@keyframes chat-modal-in { + from { opacity: 0; transform: translateY(12px) scale(0.98); } + to { opacity: 1; transform: translateY(0) scale(1); } +} + +.chat-modal-close { + position: absolute; + top: 12px; + right: 12px; + z-index: 210; + background: rgba(128, 128, 128, 0.2); + backdrop-filter: blur(8px); + border: none; + border-radius: 50%; + width: 36px; + height: 36px; + display: flex; + align-items: center; + justify-content: center; + cursor: pointer; + color: inherit; + transition: background 0.15s; +} + +.chat-modal-close:hover { + background: rgba(128, 128, 128, 0.4); +} + +.chat-modal-body { + width: 100%; + height: 100%; +} + +/* Override Shell container sizing to fit within the modal */ +.chat-modal-body .openui-shell-container { + height: 100% !important; + width: 100% !important; +} + +/* Hide the sidebar in the modal */ +.chat-modal-body .openui-shell-sidebar-container { + display: none !important; +} + +/* ─── Mobile ─────────────────────────────────────────────────────────────────── */ +@media (max-width: 768px) { + .chat-modal-overlay { + padding: 0; + align-items: flex-end; + } + + .chat-modal-container { + width: 100vw; 
+ height: 95dvh; + border-radius: 16px 16px 0 0; + animation: chat-modal-mobile-in 0.35s cubic-bezier(0.22, 1, 0.36, 1); + } +} + +@keyframes chat-modal-mobile-in { + from { transform: translateY(100%); } + to { transform: translateY(0); } +} diff --git a/docs/components/overview-components/chat-modal.tsx b/docs/components/overview-components/chat-modal.tsx new file mode 100644 index 000000000..a6003a8d5 --- /dev/null +++ b/docs/components/overview-components/chat-modal.tsx @@ -0,0 +1,78 @@ +"use client"; + +import "@openuidev/react-ui/components.css"; +import "./chat-modal.css"; + +import { openAIAdapter, openAIMessageFormat } from "@openuidev/react-headless"; +import { FullScreen } from "@openuidev/react-ui"; +import { openuiChatLibrary, openuiChatPromptOptions } from "@openuidev/react-ui/genui-lib"; +import { X } from "lucide-react"; +import { useTheme } from "next-themes"; +import { useCallback, useEffect } from "react"; +import { createPortal } from "react-dom"; + +const systemPrompt = openuiChatLibrary.prompt(openuiChatPromptOptions); + +interface ChatModalProps { + onClose: () => void; +} + +export function ChatModal({ onClose }: ChatModalProps) { + const { resolvedTheme } = useTheme(); + + const handleKey = useCallback( + (e: KeyboardEvent) => { + if (e.key === "Escape") onClose(); + }, + [onClose], + ); + + useEffect(() => { + document.addEventListener("keydown", handleKey); + document.body.style.overflow = "hidden"; + return () => { + document.removeEventListener("keydown", handleKey); + document.body.style.overflow = ""; + }; + }, [handleKey]); + + return createPortal( +
+
e.stopPropagation()}> + +
+ { + return fetch("/api/chat", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + messages: openAIMessageFormat.toApi(messages), + systemPrompt, + }), + signal: abortController.signal, + }); + }} + streamProtocol={openAIAdapter()} + componentLibrary={openuiChatLibrary} + agentName="OpenUI Chat" + theme={{ mode: (resolvedTheme as "light" | "dark") ?? "light" }} + conversationStarters={{ + variant: "short", + options: [ + { displayText: "Weather in Tokyo", prompt: "What's the weather like in Tokyo right now?" }, + { displayText: "AAPL stock price", prompt: "What's the current Apple stock price?" }, + { displayText: "Contact form", prompt: "Build me a contact form with name, email, topic, and message fields." }, + { displayText: "Data table", prompt: "Show me a table of the top 5 programming languages by popularity with year created." }, + ], + }} + /> +
+
+
, + document.body, + ); +} diff --git a/docs/components/overview-components/code-block.tsx b/docs/components/overview-components/code-block.tsx index f1f082a93..852a55b45 100644 --- a/docs/components/overview-components/code-block.tsx +++ b/docs/components/overview-components/code-block.tsx @@ -40,7 +40,7 @@ export function CodeBlock({
)}
-        {code}
+        {code}
       
); diff --git a/docs/components/overview-components/overview-page.tsx b/docs/components/overview-components/overview-page.tsx index 5ff7e79af..713977178 100644 --- a/docs/components/overview-components/overview-page.tsx +++ b/docs/components/overview-components/overview-page.tsx @@ -12,10 +12,14 @@ import { FeatureCard, FeatureCards, } from "@/components/overview-components"; -import { Code2, MessageSquare, Package } from "lucide-react"; +import { ArrowUpRight, Code2, MessageSquare, Package } from "lucide-react"; +import { useState } from "react"; +import { ChatModal } from "./chat-modal"; import { genuiOutput } from "./genui"; export function OverviewPage() { + const [isChatModalOpen, setIsChatModalOpen] = useState(false); + return (
{/* Introduction */} @@ -217,6 +221,35 @@ export function AssistantMessage({ content, isStreaming }) { /> + {/* Interactive Demo */} +
setIsChatModalOpen(true)} + role="button" + tabIndex={0} + onKeyDown={(e) => e.key === "Enter" && setIsChatModalOpen(true)} + > +
+ OpenUI Chat Demo - Click to try it live +
+
+
+
+

Try it out live

+

+ Live interactive demo of OpenUI Chat in action +

+
+ +
+
+ + {isChatModalOpen && setIsChatModalOpen(false)} />} +
{props.title}
, +}); + +export const library = createLibrary({ + components: [Card, ...otherComponents], +}); + +export const systemPrompt = library.prompt(); // Generated system prompt +`, + }, + { + title: "LLM Generates OpenUI Syntax", + description: "LLM outputs token-efficient, line-oriented syntax", + code: `root = Stack([welcomeCard]) +welcomeCard = Card([welcomeHeader, welcomeBody], "card") +welcomeHeader = CardHeader("Welcome", "Get started with our platform") +welcomeBody = Stack([signupForm], "column", "m") +signupForm = Form("signup", [nameField, emailField], actions) +nameField = FormControl("Name", Input("name", "Your name", "text", ["required", "minLength:2"])) +emailField = FormControl("Email", Input("email", "you@example.com", "email", ["required", "email"])) +actions = Buttons([signUpBtn, learnMoreBtn], "row") +signUpBtn = Button("Sign up", "submit:signup", "primary") +learnMoreBtn = Button("Learn more", "action:learn_more", "secondary") +`, + }, +]; + + +export const HowItWorks = () => { + const [activeStep, setActiveStep] = useState(0); + return ( +
+

+ Click through each step to see the complete workflow +

+ +
+ {steps.map((step, index) => ( + + ))} +
+ + +
+
+
+ {activeStep + 1} +
+
+

{steps[activeStep].title}

+

+ {steps[activeStep].description} +

+
+
+
+ + + +
+ setActiveStep(Math.max(0, activeStep - 1))} + disabled={activeStep === 0} + > + Previous + + setActiveStep(Math.min(steps.length - 1, activeStep + 1))} + disabled={activeStep === steps.length - 1} + > + Next Step + +
+
+
+ ); +} \ No newline at end of file diff --git a/docs/content/docs/openui-lang/components/lang-example.tsx b/docs/content/docs/openui-lang/components/lang-example.tsx new file mode 100644 index 000000000..97c21a067 --- /dev/null +++ b/docs/content/docs/openui-lang/components/lang-example.tsx @@ -0,0 +1,123 @@ +"use client"; + +import { + CodeBlock, + SimpleCard, + Tabs, + TabsContent, + TabsList, + TabsTrigger, +} from "@/components/overview-components"; +import { genuiOutput } from "@/components/overview-components/genui"; +import { openuiLibrary } from "@openuidev/react-ui"; +import { Renderer } from "@openuidev/react-lang"; + +const renderableOutput = `root = Stack([welcomeCard]) +welcomeCard = Card([welcomeHeader, welcomeBody]) +welcomeHeader = CardHeader("Welcome", "Get started with our platform") +welcomeBody = Stack([signupForm], "column", "m") +signupForm = Form("signup", actions, [nameField, emailField]) +nameField = FormControl("Name", Input("name", "Your name", "text", { required: true, minLength: 2 })) +emailField = FormControl("Email", Input("email", "you@example.com", "email", { required: true, email: true })) +actions = Buttons([signUpBtn, learnMoreBtn], "row") +signUpBtn = Button("Sign up", { type: "continue_conversation", context: "signup" }, "primary") +learnMoreBtn = Button("Learn more", { type: "continue_conversation", context: "learn_more" }, "secondary") +`; + +export const LangExample = () => { + return ( +
+ {/* Left: Code Tabs */} + + + + Define Lib + + + Render Code + + + System Prompt + + + LLM Output + + + + +
{renderNode(props.children)}
, + ... +}); + +export const myLibrary = createLibrary({ + components: [MyCard, ...otherComponents], +});`} + /> +
+ + + + +
+ ); +}`} + /> + + +
Generate System Prompt with CLI
+ +
Send system prompt to the LLM
+ +
+ + + + + + +
+ Output Preview +
+
+ +
+
+
+ ) +} diff --git a/docs/content/docs/openui-lang/components/try-it-out.tsx b/docs/content/docs/openui-lang/components/try-it-out.tsx new file mode 100644 index 000000000..e4b7178b2 --- /dev/null +++ b/docs/content/docs/openui-lang/components/try-it-out.tsx @@ -0,0 +1,39 @@ +"use client"; +import { useState } from "react"; +import { ArrowUpRight } from "lucide-react"; +import Image from "next/image"; +import { ChatModal } from "@/components/overview-components/chat-modal"; + +export const TryItOut = () => { + const [isChatModalOpen, setIsChatModalOpen] = useState(false); + + return (
setIsChatModalOpen(true)} + role="button" + tabIndex={0} + onKeyDown={(e) => e.key === "Enter" && setIsChatModalOpen(true)} + > +
+ OpenUI Chat Demo - Click to try it live +
+
+
+
+

Try it out live

+

+ Live interactive demo of OpenUI Chat in action +

+
+ +
+ {isChatModalOpen && setIsChatModalOpen(false)} />} +
+ ); +} \ No newline at end of file diff --git a/docs/content/docs/openui-lang/index.mdx b/docs/content/docs/openui-lang/index.mdx index 625c83b9b..8d678f368 100644 --- a/docs/content/docs/openui-lang/index.mdx +++ b/docs/content/docs/openui-lang/index.mdx @@ -1,75 +1,58 @@ --- -title: OpenUI Lang -description: Streaming-first UI language for typed component rendering. +title: Introduction --- -OpenUI Lang is a compact, line-oriented language for LLM-generated UI. - -It is designed for: - -- Streaming output (`identifier = Expression` per line) -- Token efficiency vs verbose JSON payloads -- Safe rendering against your registered component library - -## How it works - -1. Define components with `defineComponent(...)` and group them with `createLibrary(...)`. -2. Generate instructions with `library.prompt(...)` and send them as your system message. -3. LLM returns OpenUI Lang text. -4. `` parses and renders React components from that output. - -## Why OpenUI Lang - -- Renders progressively while text streams. -- Uses positional arguments derived from your Zod object key order. -- Drops invalid nodes with missing required props instead of rendering broken UI. - - - The parser currently validates required field presence/nullability from schema metadata. It does - not perform full runtime Zod type validation of every prop value. 
- - -## Minimal example - -```tsx -import "@openuidev/react-ui/components.css"; -import { Renderer } from "@openuidev/react-lang"; -import { openuiLibrary, openuiPromptOptions } from "@openuidev/react-ui"; - -const systemPrompt = openuiLibrary.prompt(openuiPromptOptions); - -; -``` - -Example OpenUI Lang output: - -```text -root = Stack([title, form]) -title = TextContent("Contact", "large-heavy") -form = Form("contact", [nameField, emailField], actions) -nameField = FormControl("Name", Input("name", "Your name", "text", ["required"])) -emailField = FormControl("Email", Input("email", "you@example.com", "email", ["required", "email"])) -actions = Buttons([submitBtn, cancelBtn], "row") -submitBtn = Button("Submit", "submit:contact", "primary") -cancelBtn = Button("Cancel", "action:cancel_contact", "secondary") -``` - -## Next Steps - - - - Start with the OpenUI library and render immediately. - - - Build custom component contracts with Zod. - - - Generate and customize model instructions. - - - Parse, stream, and handle actions. - - - Formal grammar and parser behavior. - - +import { StreamingComparison } from "@/app/docs/openui-lang/streaming-comparison"; +import { TryItOut } from "./components/try-it-out"; +import { LangExample } from "./components/lang-example"; + +OpenUI is a framework for building Generative UI with a compact, streaming-first language that is up to **[67% more token-efficient](/docs/openui-lang/benchmarks)** than JSON, resulting in faster AI-generated interfaces. + +## What is Generative UI? + + +Most AI applications are limited to returning text (as markdown) or rendering pre-built UI responses. Markdown isn't interactive, and pre-built responses are rigid (they don't adapt to the context of the conversation). + + +Generative UI fundamentally changes this relationship. Instead of merely providing content, the AI composes the interface itself. 
It dynamically selects, configures, and composes components from a predefined library to create a purpose-built interface tailored to the user's immediate request, be it an interactive chart, a complex form, or a multi-tab dashboard. + + +## OpenUI Lang + +OpenUI Lang is a compact, line-oriented language designed specifically for Large Language Models (LLMs) to generate user interfaces. It serves as a more efficient, predictable, and stream-friendly alternative to verbose formats like JSON. + +### Why a New Language? + +While JSON is a common data interchange format, it has significant drawbacks when streamed directly from an LLM for UI generation. There are also multiple implementations around it, such as Vercel [JSON-Render](https://json-render.dev/) and [A2UI](https://a2ui.org/). + +OpenUI Lang was created to solve these core issues: + +- **Token Efficiency:** JSON is extremely verbose. Keys like `"component"`, `"props"`, and `"children"` are repeated for every single element, consuming a large number of tokens. This directly increases API costs and latency. OpenUI Lang uses a concise, positional syntax that drastically reduces the token count. Benchmarks show it is up to **[67% more token-efficient](/docs/openui-lang/benchmarks)** than JSON. + +- **Streaming-First Design:** The language is line-oriented (`identifier = Expression`), making it trivial to parse and render progressively. As each line arrives from the model, a new piece of the UI can be rendered immediately. This provides a superior user experience with much better perceived performance compared to waiting for a complete JSON object to download and parse. + +- **Robustness:** LLMs are unpredictable: they can hallucinate component names or produce invalid structures. OpenUI Lang is designed to be robust against this by validating the output and dropping the invalid portions of the response from rendering. + + + +## How It Works + +Architecture diagram + +Here is a breakdown of the Generative UI workflow: + +1. 
**User Query:** The process begins when a user interacts with your application. In this example, they ask, "What did I spend on last month?". + +2. **Backend Processing:** The user's query is sent to your backend. The backend applies its own business logic (e.g., authenticating the user, fetching spending data from a database) and prepares a request for an LLM provider. + +3. **System Prompt to LLM:** The backend adds its own system prompt to the request along with the OpenUI Lang spec prompt. + +4. **LLM Generates OpenUI Lang:** The LLM provider (like OpenAI, Anthropic, etc.) processes the prompt. Instead of returning plain text or JSON, it generates a response in **OpenUI Lang**, a token-efficient syntax designed for this purpose (e.g., `root = Stack([chart])`). + +5. **Rendering:** On the client side, the `@openuidev/react-lang` library's `<Renderer />` component receives and parses the OpenUI Lang stream in real time. As each line arrives, it safely maps the code to the corresponding React components you defined in your library and renders them. + +The final result is a rich, native UI—like the "Total expenses" card and interactive pie chart—that was dynamically generated by the AI, streamed efficiently, and rendered safely on the client's device. 
+ +## Usage Example + + \ No newline at end of file diff --git a/docs/content/docs/openui-lang/meta.json b/docs/content/docs/openui-lang/meta.json index 2b2c934ed..129b06186 100644 --- a/docs/content/docs/openui-lang/meta.json +++ b/docs/content/docs/openui-lang/meta.json @@ -1,8 +1,7 @@ { - "title": "OpenUI Lang", + "title": "OpenUI", "root": true, "pages": [ - "---Getting Started---", "index", "quickstart", "---Core Concepts---", diff --git a/docs/content/docs/openui-lang/quickstart.mdx b/docs/content/docs/openui-lang/quickstart.mdx index 347f8c1a9..4fd33290b 100644 --- a/docs/content/docs/openui-lang/quickstart.mdx +++ b/docs/content/docs/openui-lang/quickstart.mdx @@ -3,74 +3,26 @@ title: Quick Start description: Use the OpenUI library to render OpenUI Lang immediately. --- -This is the fastest path: use `openuiLibrary` from `@openuidev/react-ui` and render LLM output with `@openuidev/react-lang`. -## Installation +#### Bootstrap a GenUI Chat app ```bash -npm install @openuidev/react-lang @openuidev/react-ui +npx @openuidev/cli@latest create --name genui-chat-app +cd genui-chat-app ``` -## Render output +#### Add your API key -```tsx -import "@openuidev/react-ui/components.css"; -import { Renderer } from "@openuidev/react-lang"; -import { openuiLibrary } from "@openuidev/react-ui"; - -; -``` - -## Generate prompt - -```ts -import { openuiLibrary, openuiPromptOptions } from "@openuidev/react-ui"; - -const systemPrompt = openuiLibrary.prompt(openuiPromptOptions); +```bash +echo "OPENAI_API_KEY=sk-your-key-here" > .env ``` -Use `systemPrompt` as your model system message. 
- -## Backend example +#### Start the dev server -```ts -import OpenAI from "openai"; -import { openuiLibrary, openuiPromptOptions } from "@openuidev/react-ui"; - -const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY }); - -export async function POST(req: Request) { - const { messages } = await req.json(); - - const response = await client.chat.completions.create({ - model: "gpt-5.2", - stream: true, - messages: [ - { role: "system", content: openuiLibrary.prompt(openuiPromptOptions) }, - ...messages, - ], - }); - - return new Response("stream here"); -} +```bash +npm run dev ``` -## Included component families - -- Layout: `Stack`, `Tabs`, `Accordion`, `Steps`, `Carousel` -- Content: `Card`, `CardHeader`, `TextContent`, `Callout`, `Image`, `CodeBlock` -- Data: `Table`, `TagBlock` -- Charts: `BarChart`, `LineChart`, `AreaChart`, `RadarChart`, `HorizontalBarChart`, `PieChart`, `RadialChart`, `SingleStackedBarChart`, `ScatterChart` -- Forms: `Form`, `FormControl`, `Input`, `Select`, `DatePicker`, `Slider` -- Actions: `Button` - -## Next Steps +The generated app wires up a predefined component library in `src/app/page.tsx`. - - - Build custom component contracts. - - - Handle streaming and actions. - - +Follow guide: [Define Your Components](/docs/openui-lang/defining-components) to learn how to create your own component library. 
diff --git a/docs/next.config.mjs b/docs/next.config.mjs index 3266b317f..aa47a78e9 100644 --- a/docs/next.config.mjs +++ b/docs/next.config.mjs @@ -9,7 +9,7 @@ const withMDX = createMDX(); /** @type {import('next').NextConfig} */ const config = { serverExternalPackages: ["@takumi-rs/image-response"], - transpilePackages: ["@openuidev/react-ui", "@openuidev/react-lang"], + transpilePackages: ["@openuidev/react-ui", "@openuidev/react-lang", "@openuidev/react-headless"], turbopack: { root: dirname(dirname(__dirname)), }, @@ -19,7 +19,7 @@ const config = { return [ { source: "/docs", - destination: "/docs/introduction", + destination: "/docs/openui-lang", permanent: false, }, ]; diff --git a/docs/package.json b/docs/package.json index 259dcabd9..7a72c7254 100644 --- a/docs/package.json +++ b/docs/package.json @@ -16,6 +16,7 @@ "@openuidev/react-lang": "workspace:^", "@openuidev/react-headless": "workspace:^", "@openuidev/react-ui": "workspace:^", + "openai": "^6.22.0", "@takumi-rs/image-response": "^0.68.17", "fumadocs-core": "16.6.5", "fumadocs-mdx": "14.2.8", diff --git a/docs/public/images/openui-lang/compare.png b/docs/public/images/openui-lang/compare.png new file mode 100644 index 000000000..888fc2701 Binary files /dev/null and b/docs/public/images/openui-lang/compare.png differ diff --git a/docs/public/images/openui-lang/openui-chart-flow.png b/docs/public/images/openui-lang/openui-chart-flow.png new file mode 100644 index 000000000..22b5cc6a1 Binary files /dev/null and b/docs/public/images/openui-lang/openui-chart-flow.png differ diff --git a/packages/openui-cli/src/commands/create-chat-app.ts b/packages/openui-cli/src/commands/create-chat-app.ts index f038f396e..6d9f5237d 100644 --- a/packages/openui-cli/src/commands/create-chat-app.ts +++ b/packages/openui-cli/src/commands/create-chat-app.ts @@ -102,5 +102,20 @@ export async function runCreateChatApp(options: CreateChatAppOptions): Promise + ` +Done! 
+Get started: + +cd ${name} + +touch .env + +Add your API key to .env: +OPENAI_API_KEY=sk-your-key-here + +${devCmd} run dev +`; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 73be71007..4d9ad148e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -80,6 +80,9 @@ importers: next-themes: specifier: ^0.4.6 version: 0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + openai: + specifier: ^6.22.0 + version: 6.22.0(ws@8.18.2)(zod@4.3.6) posthog-js: specifier: ^1.358.1 version: 1.358.1