|
71 | 71 |
|
72 | 72 | // --- helpers --- |
// Serialize a tri-state flag for use as a query parameter:
// null/undefined -> undefined (param omitted), otherwise 'true'/'false'.
const bool = (v) => {
  if (v == null) return undefined;
  return v ? 'true' : 'false';
};
// Resolve after `ms` milliseconds; used to back off between retries.
const sleep = (ms) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
74 | 75 | function base64FromArrayBuffer(ab) { |
75 | 76 | const bytes = new Uint8Array(ab); |
76 | 77 | let binary = ''; |
|
83 | 84 | } |
84 | 85 |
|
85 | 86 | // --- image.js --- |
86 | | - async function image(prompt, { model, seed, width, height, image: imgUrl, nologo, private: priv, enhance, safe, referrer } = {}, client = getDefaultClient()) { |
| 87 | + async function image(prompt, { model, seed, width, height, image: imgUrl, nologo, private: priv, enhance, safe, referrer, json, retries = 5, retryDelayMs = 1000 } = {}, client = getDefaultClient()) { |
87 | 88 | const url = `${client.imageBase}/prompt/${encodeURIComponent(prompt)}`; |
88 | 89 | const params = {}; |
89 | 90 | if (model) params.model = model; |
|
96 | 97 | if (enhance != null) params.enhance = bool(enhance); |
97 | 98 | if (safe != null) params.safe = bool(safe); |
98 | 99 | if (referrer) params.referrer = referrer; |
99 | | - const r = await client.get(url, { params }); |
| 100 | + if (json) params.json = 'true'; |
| 101 | + const headers = json ? { Accept: 'application/json' } : {}; |
| 102 | + const r = await client.get(url, { params, headers }); |
100 | 103 | if (!r.ok) throw new Error(`image error ${r.status}`); |
| 104 | + const ct = r.headers.get('content-type') ?? ''; |
| 105 | + if (ct.includes('application/json')) { |
| 106 | + const data = await r.json(); |
| 107 | + if (json) return data; |
| 108 | + if (data?.url) { |
| 109 | + const ir = await fetch(data.url); |
| 110 | + if (ir.ok) return await ir.blob(); |
| 111 | + } |
| 112 | + if (retries > 0) { |
| 113 | + await sleep(retryDelayMs); |
| 114 | + return await image(prompt, { model, seed, width, height, image: imgUrl, nologo, private: priv, enhance, safe, referrer, json, retries: retries - 1, retryDelayMs }, client); |
| 115 | + } |
| 116 | + throw new Error('image pending'); |
| 117 | + } |
101 | 118 | return await r.blob(); |
102 | 119 | } |
103 | 120 | async function imageModels(client = getDefaultClient()) { |
|
131 | 148 | return await r.text(); |
132 | 149 | } |
133 | 150 | } |
134 | | - async function chat({ model, messages, seed, temperature, top_p, presence_penalty, frequency_penalty, max_tokens, stream, private: priv, tools, tool_choice, referrer }, client = getDefaultClient()) { |
| 151 | + async function chat({ model, messages, seed, temperature, top_p, presence_penalty, frequency_penalty, max_tokens, stream, private: priv, tools, tool_choice, referrer, json }, client = getDefaultClient()) { |
135 | 152 | const url = `${client.textBase}/openai`; |
136 | 153 | const body = { model, messages }; |
137 | 154 | if (seed != null) body.seed = seed; |
|
144 | 161 | if (tools) body.tools = tools; |
145 | 162 | if (tool_choice) body.tool_choice = tool_choice; |
146 | 163 | if (referrer) body.referrer = referrer; |
| 164 | + if (json) body.json = true; |
147 | 165 | if (stream) { |
148 | 166 | body.stream = true; |
149 | 167 | const r = await client.postJson(url, body, { headers: { 'Accept': 'text/event-stream' } }); |
|
/** MCP-surface alias for textModels(); forwards the client unchanged. */
async function listTextModels(client) {
  const models = await textModels(client);
  return models;
}
/**
 * List the voices of the 'openai-audio' text model.
 * Returns [] when the catalog, the model entry, or its voices are absent.
 */
async function listAudioVoices(client) {
  const catalog = await textModels(client);
  const audio = catalog?.['openai-audio'];
  return audio?.voices ?? [];
}
268 | 286 |
|
/**
 * Fetch the image and text model catalogs in parallel and derive the
 * audio capabilities from the text catalog's 'openai-audio' entry.
 * Best-effort: a failing catalog fetch degrades to {} rather than throwing.
 * @param {object} [client] - API client; defaults to getDefaultClient().
 * @returns {Promise<{image: object, text: object, audio: object}>}
 */
async function modelCapabilities(client = getDefaultClient()) {
  const orEmpty = (promise) => promise.catch(() => ({}));
  const [image, text] = await Promise.all([
    orEmpty(imageModels(client)),
    orEmpty(textModels(client)),
  ]);
  const audio = text?.['openai-audio'] ?? {};
  return { image, text, audio };
}
| 294 | + |
269 | 295 | // --- pipeline.js --- |
// Shared key/value store handed to every pipeline step's run({ client, context }).
class Context extends Map {}
/**
 * An ordered sequence of steps run against a shared Context.
 * Each step must expose an async run({ client, context }) method.
 */
class Pipeline {
  constructor() {
    this.steps = [];
  }

  /** Append a step; returns this for chaining. */
  step(s) {
    this.steps.push(s);
    return this;
  }

  /**
   * Run every registered step sequentially, in insertion order,
   * then return the (possibly freshly created) context.
   */
  async execute({ client, context = new Context() } = {}) {
    for (const current of this.steps) {
      await current.run({ client, context });
    }
    return context;
  }
}
|
282 | 308 | const api = { |
283 | 309 | configure, |
284 | 310 | image, text, chat, search, tts, stt, vision, |
285 | | - imageModels, textModels, imageFeed, textFeed, |
| 311 | + imageModels, textModels, imageFeed, textFeed, modelCapabilities, |
286 | 312 | tools: { functionTool, ToolBox, chatWithTools }, |
287 | 313 | mcp: { serverName, toolDefinitions, generateImageUrl, generateImageBase64, listImageModels, listTextModels, listAudioVoices }, |
288 | 314 | pipeline: { Context, Pipeline, TextGetStep, ImageStep, TtsStep, VisionUrlStep }, |
|
0 commit comments