From 128a7868d2ed8615348e364165a4c294ed04e281 Mon Sep 17 00:00:00 2001
From: sawka
Date: Fri, 5 Dec 2025 17:44:49 -0800
Subject: [PATCH 01/13] first cut at v0.13 release notes

---
 docs/docs/releasenotes.mdx | 27 +++++++++++++++++++++++++++
 package-lock.json          |  4 ++--
 2 files changed, 29 insertions(+), 2 deletions(-)

diff --git a/docs/docs/releasenotes.mdx b/docs/docs/releasenotes.mdx
index f14b1252c7..b540906e06 100644
--- a/docs/docs/releasenotes.mdx
+++ b/docs/docs/releasenotes.mdx
@@ -6,6 +6,33 @@ sidebar_position: 200
 
 # Release Notes
 
+### v0.13.0 — Dec 8, 2025
+
+**Wave v0.13 Brings Local AI Support, BYOK, and Unified Configuration**
+
+Wave v0.13 is a major release that opens up Wave AI to local models, third-party providers, and bring-your-own-key (BYOK) configurations. This release also includes a completely redesigned configuration system and several terminal improvements.
+
+**Local AI & BYOK Support:**
+- **OpenAI-Compatible API** - Wave now supports any provider or local server using the `/v1/chat/completions` endpoint, enabling use of Ollama, LM Studio, vLLM, OpenRouter, and countless other local and hosted models
+- **Google Gemini Integration** - Native support for Google's Gemini models with a dedicated API adapter
+- **Provider Presets** - Simplified configuration with built-in presets for OpenAI, OpenRouter, Google, Azure, and custom endpoints
+- **Multiple AI Modes** - Easily switch between different models and providers with a unified interface
+- See the new [Wave AI Modes documentation](https://docs.waveterm.dev/waveai-modes) for configuration examples and setup guides
+
+**Unified Configuration Widget:**
+- **New Config Interface** - Replaced the basic JSON editor with a dedicated configuration widget accessible from the sidebar
+- **Better Organization** - Browse and edit different configuration types (general settings, AI modes, secrets) with improved validation and error handling
+- **Integrated Secrets Management** - Access Wave's secret store directly from the config widget for secure credential management
+
+**Terminal Improvements:**
+- **Bracketed Paste Mode** - Now enabled by default to improve multi-line paste behavior and compatibility with tools like Claude Code
+- **Windows Paste Fix** - Ctrl+V now works as a standard paste accelerator on Windows
+- **SSH Password Management** - Store SSH connection passwords in Wave's secret store to avoid re-typing credentials
+
+**Other Changes:**
+- Package updates and dependency upgrades
+- Various bug fixes and stability improvements
+
 ### v0.12.5 — Nov 24, 2025
 
 Quick patch release to fix paste behavior on Linux (prevent raw HTML from getting pasted to the terminal).
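As an editorial aside on the local/BYOK support announced in the release notes above, the sketch below shows roughly how a local OpenAI-compatible server (an Ollama instance on its default port) maps onto the AI-mode fields used later in this series. It is a hedged sketch, not the documented configuration format: the struct and constant names (`wconfig.AIModeConfigType`, `uctypes.AIProvider_Custom`, `uctypes.APIType_OpenAIChat`) are taken from the Go diffs in patches 02 and 04, while the import paths, endpoint URL, and model name are assumptions; see the Wave AI Modes documentation linked above for the supported user-facing syntax.

```go
// Illustrative sketch only: constant and field names are taken from the diffs in this
// series; the import paths, endpoint URL, and model name are assumptions.
package example

import (
	"github.com/wavetermdev/waveterm/pkg/aiusechat/uctypes" // path as it appears in this series
	"github.com/wavetermdev/waveterm/pkg/wconfig"           // assumed from the repository layout
)

// exampleLocalMode describes a local OpenAI-compatible server (e.g. Ollama) using the
// same fields that applyProviderDefaults operates on in patch 04.
func exampleLocalMode() wconfig.AIModeConfigType {
	return wconfig.AIModeConfigType{
		Provider: uctypes.AIProvider_Custom,                    // generic OpenAI-compatible provider
		APIType:  uctypes.APIType_OpenAIChat,                    // the /v1/chat/completions protocol
		Endpoint: "http://localhost:11434/v1/chat/completions",  // Ollama's default local address (assumed)
		Model:    "llama3.1",                                    // whichever model the local server exposes
		// APITokenSecretName is left empty for a local server; hosted BYOK providers get
		// defaults such as "OPENAI_KEY" that resolve against Wave's secret store.
	}
}
```

An endpoint like this one would also be matched by the `isLocalEndpoint` check added in patch 02 (it contains "localhost"), so the telemetry introduced there would record the chat as using a local, non-hosted model.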
diff --git a/package-lock.json b/package-lock.json index 1686b1c0c5..0bddd41d75 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "waveterm", - "version": "0.12.5", + "version": "0.13.0-beta.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "waveterm", - "version": "0.12.5", + "version": "0.13.0-beta.0", "hasInstallScript": true, "license": "Apache-2.0", "workspaces": [ From 3e0658a19c5536281e8f41cd6397ef30266795e0 Mon Sep 17 00:00:00 2001 From: sawka Date: Fri, 5 Dec 2025 17:53:16 -0800 Subject: [PATCH 02/13] add provider and local to wave ai telemetry --- pkg/aiusechat/uctypes/uctypes.go | 2 ++ pkg/aiusechat/usechat.go | 17 +++++++++++++++++ pkg/telemetry/telemetrydata/telemetrydata.go | 2 ++ 3 files changed, 21 insertions(+) diff --git a/pkg/aiusechat/uctypes/uctypes.go b/pkg/aiusechat/uctypes/uctypes.go index 7d3ea45c13..3d00f1ed88 100644 --- a/pkg/aiusechat/uctypes/uctypes.go +++ b/pkg/aiusechat/uctypes/uctypes.go @@ -332,6 +332,8 @@ type AIMetrics struct { WidgetAccess bool `json:"widgetaccess"` ThinkingLevel string `json:"thinkinglevel,omitempty"` AIMode string `json:"aimode,omitempty"` + AIProvider string `json:"aiprovider,omitempty"` + IsLocal bool `json:"islocal,omitempty"` } type AIFunctionCallInput struct { diff --git a/pkg/aiusechat/usechat.go b/pkg/aiusechat/usechat.go index 0ec2fa7d34..06364da256 100644 --- a/pkg/aiusechat/usechat.go +++ b/pkg/aiusechat/usechat.go @@ -60,6 +60,14 @@ func getSystemPrompt(apiType string, model string, isBuilder bool) []string { return []string{basePrompt} } +func isLocalEndpoint(endpoint string) bool { + if endpoint == "" { + return false + } + endpointLower := strings.ToLower(endpoint) + return strings.Contains(endpointLower, "localhost") || strings.Contains(endpointLower, "127.0.0.1") +} + func getWaveAISettings(premium bool, builderMode bool, rtInfo waveobj.ObjRTInfo) (*uctypes.AIOptsType, error) { maxTokens := DefaultMaxTokens if builderMode { @@ -353,6 +361,11 @@ func RunAIChat(ctx context.Context, sseHandler *sse.SSEHandlerCh, backend UseCha defer activeChats.Delete(chatOpts.ChatId) stepNum := chatstore.DefaultChatStore.CountUserMessages(chatOpts.ChatId) + aiProvider := chatOpts.Config.Provider + if aiProvider == "" { + aiProvider = uctypes.AIProvider_Custom + } + isLocal := isLocalEndpoint(chatOpts.Config.Endpoint) metrics := &uctypes.AIMetrics{ ChatId: chatOpts.ChatId, StepNum: stepNum, @@ -364,6 +377,8 @@ func RunAIChat(ctx context.Context, sseHandler *sse.SSEHandlerCh, backend UseCha ToolDetail: make(map[string]int), ThinkingLevel: chatOpts.Config.ThinkingLevel, AIMode: chatOpts.Config.AIMode, + AIProvider: aiProvider, + IsLocal: isLocal, } firstStep := true var cont *uctypes.WaveContinueResponse @@ -563,6 +578,8 @@ func sendAIMetricsTelemetry(ctx context.Context, metrics *uctypes.AIMetrics) { WaveAIWidgetAccess: metrics.WidgetAccess, WaveAIThinkingLevel: metrics.ThinkingLevel, WaveAIMode: metrics.AIMode, + WaveAIProvider: metrics.AIProvider, + WaveAIIsLocal: metrics.IsLocal, }) _ = telemetry.RecordTEvent(ctx, event) } diff --git a/pkg/telemetry/telemetrydata/telemetrydata.go b/pkg/telemetry/telemetrydata/telemetrydata.go index 7dd7bffdb9..7f186e71ca 100644 --- a/pkg/telemetry/telemetrydata/telemetrydata.go +++ b/pkg/telemetry/telemetrydata/telemetrydata.go @@ -148,6 +148,8 @@ type TEventProps struct { WaveAIWidgetAccess bool `json:"waveai:widgetaccess,omitempty"` WaveAIThinkingLevel string `json:"waveai:thinkinglevel,omitempty"` WaveAIMode string `json:"waveai:mode,omitempty"` + 
WaveAIProvider string `json:"waveai:provider,omitempty"` + WaveAIIsLocal bool `json:"waveai:islocal,omitempty"` WaveAIFeedback string `json:"waveai:feedback,omitempty" tstype:"\"good\" | \"bad\""` WaveAIAction string `json:"waveai:action,omitempty"` From c4c3fa5350e68eff6f0046cc75916f2a7c42788b Mon Sep 17 00:00:00 2001 From: sawka Date: Mon, 8 Dec 2025 12:45:47 -0800 Subject: [PATCH 03/13] change name of files --- ...rding-upgrade-v0120.tsx => onboarding-upgrade-minor.tsx} | 6 +++--- frontend/app/onboarding/onboarding-upgrade.tsx | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) rename frontend/app/onboarding/{onboarding-upgrade-v0120.tsx => onboarding-upgrade-minor.tsx} (98%) diff --git a/frontend/app/onboarding/onboarding-upgrade-v0120.tsx b/frontend/app/onboarding/onboarding-upgrade-minor.tsx similarity index 98% rename from frontend/app/onboarding/onboarding-upgrade-v0120.tsx rename to frontend/app/onboarding/onboarding-upgrade-minor.tsx index 2e1fe52ca8..331d446edf 100644 --- a/frontend/app/onboarding/onboarding-upgrade-v0120.tsx +++ b/frontend/app/onboarding/onboarding-upgrade-minor.tsx @@ -16,7 +16,7 @@ import { OverlayScrollbarsComponent } from "overlayscrollbars-react"; import { useEffect, useRef, useState } from "react"; import { debounce } from "throttle-debounce"; -const UpgradeOnboardingModal_v0_12_0 = () => { +const UpgradeOnboardingMinor = () => { const modalRef = useRef(null); const [pageName, setPageName] = useState<"welcome" | "features">("welcome"); const [isCompact, setIsCompact] = useState(window.innerHeight < 800); @@ -196,6 +196,6 @@ const UpgradeOnboardingModal_v0_12_0 = () => { ); }; -UpgradeOnboardingModal_v0_12_0.displayName = "UpgradeOnboardingModal_v0_12_0"; +UpgradeOnboardingMinor.displayName = "UpgradeOnboardingMinor"; -export { UpgradeOnboardingModal_v0_12_0 }; \ No newline at end of file +export { UpgradeOnboardingMinor }; \ No newline at end of file diff --git a/frontend/app/onboarding/onboarding-upgrade.tsx b/frontend/app/onboarding/onboarding-upgrade.tsx index c5b86c64b1..4305a58be3 100644 --- a/frontend/app/onboarding/onboarding-upgrade.tsx +++ b/frontend/app/onboarding/onboarding-upgrade.tsx @@ -7,7 +7,7 @@ import { useAtomValue } from "jotai"; import { useEffect, useRef } from "react"; import * as semver from "semver"; import { CurrentOnboardingVersion } from "./onboarding-common"; -import { UpgradeOnboardingModal_v0_12_0 } from "./onboarding-upgrade-v0120"; +import { UpgradeOnboardingMinor } from "./onboarding-upgrade-minor"; import { UpgradeOnboardingPatch } from "./onboarding-upgrade-patch"; const UpgradeOnboardingModal = () => { @@ -34,7 +34,7 @@ const UpgradeOnboardingModal = () => { return ; } - return ; + return ; }; UpgradeOnboardingModal.displayName = "UpgradeOnboardingModal"; From 833909ce399e2abf202b664a3d1979a636e80da0 Mon Sep 17 00:00:00 2001 From: sawka Date: Mon, 8 Dec 2025 13:13:38 -0800 Subject: [PATCH 04/13] fix the openai/openrouter endpoints --- pkg/aiusechat/uctypes/uctypes.go | 2 -- pkg/aiusechat/usechat-mode.go | 57 ++++++++++++++++++++++---------- 2 files changed, 40 insertions(+), 19 deletions(-) diff --git a/pkg/aiusechat/uctypes/uctypes.go b/pkg/aiusechat/uctypes/uctypes.go index 3d00f1ed88..7cedcb19eb 100644 --- a/pkg/aiusechat/uctypes/uctypes.go +++ b/pkg/aiusechat/uctypes/uctypes.go @@ -11,8 +11,6 @@ import ( ) const DefaultAIEndpoint = "https://cfapi.waveterm.dev/api/waveai" -const DefaultOpenAIEndpoint = "https://api.openai.com/v1" -const DefaultOpenRouterEndpoint = "https://openrouter.ai/api/v1" const 
WaveAIEndpointEnvName = "WAVETERM_WAVEAI_ENDPOINT" const DefaultAnthropicModel = "claude-sonnet-4-5" const DefaultOpenAIModel = "gpt-5-mini" diff --git a/pkg/aiusechat/usechat-mode.go b/pkg/aiusechat/usechat-mode.go index a92c4a3df2..5d629f6779 100644 --- a/pkg/aiusechat/usechat-mode.go +++ b/pkg/aiusechat/usechat-mode.go @@ -15,6 +15,23 @@ import ( var AzureResourceNameRegex = regexp.MustCompile(`^[a-z0-9]([a-z0-9-]*[a-z0-9])?$`) +const ( + OpenAIResponsesEndpoint = "https://api.openai.com/v1/responses" + OpenAIChatEndpoint = "https://api.openai.com/v1/chat/completions" + OpenRouterChatEndpoint = "https://openrouter.ai/api/v1/chat/completions" + AzureLegacyEndpointTemplate = "https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=%s" + AzureResponsesEndpointTemplate = "https://%s.openai.azure.com/openai/v1/responses" + AzureChatEndpointTemplate = "https://%s.openai.azure.com/openai/v1/chat/completions" + GoogleGeminiEndpointTemplate = "https://generativelanguage.googleapis.com/v1beta/models/%s:streamGenerateContent" + + AzureLegacyDefaultAPIVersion = "2025-04-01-preview" + + OpenAIAPITokenSecretName = "OPENAI_KEY" + OpenRouterAPITokenSecretName = "OPENROUTER_KEY" + AzureOpenAIAPITokenSecretName = "AZURE_OPENAI_KEY" + GoogleAIAPITokenSecretName = "GOOGLE_AI_KEY" +) + func resolveAIMode(requestedMode string, premium bool) (string, *wconfig.AIModeConfigType, error) { mode := requestedMode if mode == "" { @@ -52,14 +69,21 @@ func applyProviderDefaults(config *wconfig.AIModeConfigType) { } } if config.Provider == uctypes.AIProvider_OpenAI { - if config.Endpoint == "" { - config.Endpoint = uctypes.DefaultOpenAIEndpoint - } if config.APIType == "" { config.APIType = getOpenAIAPIType(config.Model) } + if config.Endpoint == "" { + switch config.APIType { + case uctypes.APIType_OpenAIResponses: + config.Endpoint = OpenAIResponsesEndpoint + case uctypes.APIType_OpenAIChat: + config.Endpoint = OpenAIChatEndpoint + default: + config.Endpoint = OpenAIChatEndpoint + } + } if config.APITokenSecretName == "" { - config.APITokenSecretName = "OPENAI_KEY" + config.APITokenSecretName = OpenAIAPITokenSecretName } if len(config.Capabilities) == 0 { if isO1Model(config.Model) { @@ -70,29 +94,29 @@ func applyProviderDefaults(config *wconfig.AIModeConfigType) { } } if config.Provider == uctypes.AIProvider_OpenRouter { - if config.Endpoint == "" { - config.Endpoint = uctypes.DefaultOpenRouterEndpoint - } if config.APIType == "" { config.APIType = uctypes.APIType_OpenAIChat } + if config.Endpoint == "" { + config.Endpoint = OpenRouterChatEndpoint + } if config.APITokenSecretName == "" { - config.APITokenSecretName = "OPENROUTER_KEY" + config.APITokenSecretName = OpenRouterAPITokenSecretName } } if config.Provider == uctypes.AIProvider_AzureLegacy { if config.AzureAPIVersion == "" { - config.AzureAPIVersion = "2025-04-01-preview" + config.AzureAPIVersion = AzureLegacyDefaultAPIVersion } if config.Endpoint == "" && isValidAzureResourceName(config.AzureResourceName) && config.AzureDeployment != "" { - config.Endpoint = fmt.Sprintf("https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=%s", + config.Endpoint = fmt.Sprintf(AzureLegacyEndpointTemplate, config.AzureResourceName, config.AzureDeployment, config.AzureAPIVersion) } if config.APIType == "" { config.APIType = uctypes.APIType_OpenAIChat } if config.APITokenSecretName == "" { - config.APITokenSecretName = "AZURE_OPENAI_KEY" + config.APITokenSecretName = AzureOpenAIAPITokenSecretName } } if config.Provider == 
uctypes.AIProvider_Azure { @@ -103,16 +127,15 @@ func applyProviderDefaults(config *wconfig.AIModeConfigType) { config.APIType = getAzureAPIType(config.Model) } if config.Endpoint == "" && isValidAzureResourceName(config.AzureResourceName) && isAzureAPIType(config.APIType) { - base := fmt.Sprintf("https://%s.openai.azure.com/openai/v1", config.AzureResourceName) switch config.APIType { case uctypes.APIType_OpenAIResponses: - config.Endpoint = base + "/responses" + config.Endpoint = fmt.Sprintf(AzureResponsesEndpointTemplate, config.AzureResourceName) case uctypes.APIType_OpenAIChat: - config.Endpoint = base + "/chat/completions" + config.Endpoint = fmt.Sprintf(AzureChatEndpointTemplate, config.AzureResourceName) } } if config.APITokenSecretName == "" { - config.APITokenSecretName = "AZURE_OPENAI_KEY" + config.APITokenSecretName = AzureOpenAIAPITokenSecretName } } if config.Provider == uctypes.AIProvider_Google { @@ -120,10 +143,10 @@ func applyProviderDefaults(config *wconfig.AIModeConfigType) { config.APIType = uctypes.APIType_GoogleGemini } if config.Endpoint == "" && config.Model != "" { - config.Endpoint = fmt.Sprintf("https://generativelanguage.googleapis.com/v1beta/models/%s:streamGenerateContent", config.Model) + config.Endpoint = fmt.Sprintf(GoogleGeminiEndpointTemplate, config.Model) } if config.APITokenSecretName == "" { - config.APITokenSecretName = "GOOGLE_AI_KEY" + config.APITokenSecretName = GoogleAIAPITokenSecretName } if len(config.Capabilities) == 0 { config.Capabilities = []string{uctypes.AICapabilityTools, uctypes.AICapabilityImages, uctypes.AICapabilityPdfs} From bd7a3768e4d0172eeef07a40a0d3e779f0af5f21 Mon Sep 17 00:00:00 2001 From: sawka Date: Mon, 8 Dec 2025 14:52:21 -0800 Subject: [PATCH 05/13] wording updates on initial screens for v0.13 --- frontend/app/app.scss | 4 +++ frontend/app/element/toggle.tsx | 6 ++-- frontend/app/onboarding/onboarding-common.tsx | 2 +- .../onboarding/onboarding-upgrade-minor.tsx | 32 +++++++++++-------- .../app/onboarding/onboarding-upgrade.tsx | 4 +-- frontend/app/onboarding/onboarding.tsx | 22 +++++++------ 6 files changed, 42 insertions(+), 28 deletions(-) diff --git a/frontend/app/app.scss b/frontend/app/app.scss index 9cbe55aba8..9ce3bc9f5d 100644 --- a/frontend/app/app.scss +++ b/frontend/app/app.scss @@ -35,6 +35,10 @@ body { a.plain-link { color: var(--secondary-text-color); + + &:hover { + text-decoration: underline; + } } *::-webkit-scrollbar { diff --git a/frontend/app/element/toggle.tsx b/frontend/app/element/toggle.tsx index 29de968e4f..2f09fb1e58 100644 --- a/frontend/app/element/toggle.tsx +++ b/frontend/app/element/toggle.tsx @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 import { useRef } from "react"; +import { cn } from "@/util/util"; import "./toggle.scss"; interface ToggleProps { @@ -9,9 +10,10 @@ interface ToggleProps { onChange: (value: boolean) => void; label?: string; id?: string; + className?: string; } -const Toggle = ({ checked, onChange, label, id }: ToggleProps) => { +const Toggle = ({ checked, onChange, label, id, className }: ToggleProps) => { const inputRef = useRef(null); const handleChange = (e: any) => { @@ -29,7 +31,7 @@ const Toggle = ({ checked, onChange, label, id }: ToggleProps) => { const inputId = id || `toggle-${Math.random().toString(36).substr(2, 9)}`; return ( -
+