Skip to content

Commit 72ed9c9

Browse files
pavanputhra and claude authored
fix(CON-519): resolve vendor from opts instead of hardcoding openai in analyze link (#146)
Adds get_vendor_from_opts() to openai_client.py that returns the correct vendor based on configuration: "openai", "azure", or when using LiteLLM, infers from the model name (e.g. azure/gpt-4o → "azure", claude-* → "anthropic") with "litellm" as fallback. Also adds litellm_config.yaml to .gitignore since it contains API keys. Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent 92290b2 commit 72ed9c9

File tree

3 files changed

+72
-2
lines changed

3 files changed

+72
-2
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ static
1212
!static/styles.css
1313
config.json
1414
config.yml
15+
litellm_config.yaml
1516

1617
.data
1718
tmp

server/lib/openai_client.py

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,75 @@ def get_openai_client(opts=None):
8686
)
8787

8888

def get_vendor_from_opts(opts=None):
    """Resolve the AI vendor identifier from a configuration dict.

    When a LiteLLM proxy is configured, attempts to recover the actual
    provider from the model name in opts (e.g. "claude-*" -> "anthropic"),
    falling back to "litellm" when the name matches no known pattern.

    Returns one of: "openai", "azure", "anthropic", "google", "mistral",
    "meta", "cohere", or "litellm".
    """
    cfg = opts if opts is not None else {}

    def _configured(key):
        # A key counts as set only when it is present, non-None,
        # and non-blank after stripping whitespace.
        return bool((cfg.get(key) or "").strip())

    # LiteLLM proxy takes precedence: both the URL and key must be set.
    if _configured("LITELLM_PROXY_URL") and _configured("LITELLM_MASTER_KEY"):
        inferred = _infer_vendor_from_model_name(cfg.get("model") or "")
        return inferred or "litellm"

    # Azure OpenAI requires both endpoint and API key.
    if _configured("AZURE_OPENAI_ENDPOINT") and _configured("AZURE_OPENAI_API_KEY"):
        return "azure"

    # Default: plain OpenAI.
    return "openai"
115+
116+
def _infer_vendor_from_model_name(model_name):
117+
"""Infer vendor from a model name string. Returns None if unknown.
118+
119+
Handles LiteLLM provider-prefixed names (e.g. "azure/gpt-4o",
120+
"anthropic/claude-3") as well as bare model names (e.g. "gpt-4o-mini").
121+
"""
122+
if not model_name:
123+
return None
124+
parts = model_name.lower().split("/")
125+
# If a provider prefix is present, use it directly
126+
if len(parts) > 1:
127+
prefix_map = {
128+
"openai": "openai",
129+
"azure": "azure",
130+
"anthropic": "anthropic",
131+
"google": "google",
132+
"vertex_ai": "google",
133+
"mistral": "mistral",
134+
"meta": "meta",
135+
"cohere": "cohere",
136+
"groq": "groq",
137+
"bedrock": "bedrock",
138+
}
139+
if parts[0] in prefix_map:
140+
return prefix_map[parts[0]]
141+
# Fall back to model name pattern matching on the last segment
142+
m = parts[-1]
143+
if m.startswith("claude"):
144+
return "anthropic"
145+
if m.startswith("gpt-") or m.startswith("o1") or m.startswith("o3") or m.startswith("chatgpt"):
146+
return "openai"
147+
if m.startswith("gemini"):
148+
return "google"
149+
if m.startswith("mistral") or m.startswith("mixtral"):
150+
return "mistral"
151+
if m.startswith("llama") or m.startswith("meta-llama"):
152+
return "meta"
153+
if m.startswith("command"):
154+
return "cohere"
155+
return None
156+
157+
89158
def get_async_openai_client(opts=None):
90159
"""
91160
Return an async OpenAI-compatible client. Same opts semantics as get_openai_client.

server/links/analyze/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from lib.vcon_redis import VconRedis
22
from lib.logging_utils import init_logger
3-
from lib.openai_client import get_openai_client
3+
from lib.openai_client import get_openai_client, get_vendor_from_opts
44
import logging
55
from tenacity import (
66
retry,
@@ -169,7 +169,7 @@ def run(
169169
vCon.add_analysis(
170170
type=opts["analysis_type"],
171171
dialog=index,
172-
vendor="openai",
172+
vendor=get_vendor_from_opts(opts),
173173
body=analysis,
174174
encoding="none",
175175
extra={

0 commit comments

Comments (0)