-
Notifications
You must be signed in to change notification settings - Fork 12
fix(CON-519): resolve vendor from opts instead of hardcoding openai in analyze link #146
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -13,6 +13,7 @@ static | |
| !static/styles.css | ||
| config.json | ||
| config.yml | ||
| litellm_config.yaml | ||
|
|
||
| .data | ||
| tmp | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -86,6 +86,75 @@ def get_openai_client(opts=None): | |
| ) | ||
|
|
||
|
|
||
| def get_vendor_from_opts(opts=None): | ||
| """ | ||
| Determine the AI vendor string from opts. | ||
|
|
||
| When using LiteLLM, tries to infer the actual provider from the model | ||
| name in opts (e.g. "claude-*" -> "anthropic"). Falls back to "litellm" | ||
| if the model name doesn't match a known pattern. | ||
|
|
||
| Returns one of: "openai", "azure", "anthropic", "google", "mistral", | ||
| "meta", "cohere", "groq", "bedrock", or "litellm". | ||
| """ | ||
| opts = opts or {} | ||
| litellm_url = (opts.get("LITELLM_PROXY_URL") or "").strip() | ||
| litellm_key = (opts.get("LITELLM_MASTER_KEY") or "").strip() | ||
| if litellm_url and litellm_key: | ||
| model = opts.get("model") or "" | ||
| inferred = _infer_vendor_from_model_name(model) | ||
| return inferred if inferred else "litellm" | ||
|
|
||
| azure_endpoint = (opts.get("AZURE_OPENAI_ENDPOINT") or "").strip() | ||
| azure_api_key = (opts.get("AZURE_OPENAI_API_KEY") or "").strip() | ||
| if azure_endpoint and azure_api_key: | ||
| return "azure" | ||
|
|
||
| return "openai" | ||
|
Comment on lines
+89
to
+113
|
||
|
|
||
|
|
||
| def _infer_vendor_from_model_name(model_name): | ||
| """Infer vendor from a model name string. Returns None if unknown. | ||
|
|
||
| Handles LiteLLM provider-prefixed names (e.g. "azure/gpt-4o", | ||
| "anthropic/claude-3") as well as bare model names (e.g. "gpt-4o-mini"). | ||
| """ | ||
| if not model_name: | ||
| return None | ||
| parts = model_name.lower().split("/") | ||
| # If a provider prefix is present, use it directly | ||
| if len(parts) > 1: | ||
| prefix_map = { | ||
| "openai": "openai", | ||
| "azure": "azure", | ||
| "anthropic": "anthropic", | ||
| "google": "google", | ||
| "vertex_ai": "google", | ||
| "mistral": "mistral", | ||
| "meta": "meta", | ||
| "cohere": "cohere", | ||
| "groq": "groq", | ||
| "bedrock": "bedrock", | ||
| } | ||
| if parts[0] in prefix_map: | ||
| return prefix_map[parts[0]] | ||
| # Fall back to model name pattern matching on the last segment | ||
| m = parts[-1] | ||
| if m.startswith("claude"): | ||
| return "anthropic" | ||
| if m.startswith("gpt-") or m.startswith("o1") or m.startswith("o3") or m.startswith("chatgpt"): | ||
| return "openai" | ||
|
Comment on lines
+124
to
+146
|
||
| if m.startswith("gemini"): | ||
| return "google" | ||
| if m.startswith("mistral") or m.startswith("mixtral"): | ||
| return "mistral" | ||
| if m.startswith("llama") or m.startswith("meta-llama"): | ||
| return "meta" | ||
| if m.startswith("command"): | ||
| return "cohere" | ||
| return None | ||
|
|
||
|
|
||
| def get_async_openai_client(opts=None): | ||
| """ | ||
| Return an async OpenAI-compatible client. Same opts semantics as get_openai_client. | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,6 +1,6 @@ | ||
| from lib.vcon_redis import VconRedis | ||
| from lib.logging_utils import init_logger | ||
| from lib.openai_client import get_openai_client | ||
| from lib.openai_client import get_openai_client, get_vendor_from_opts | ||
| import logging | ||
| from tenacity import ( | ||
| retry, | ||
|
|
@@ -169,7 +169,7 @@ def run( | |
| vCon.add_analysis( | ||
| type=opts["analysis_type"], | ||
| dialog=index, | ||
| vendor="openai", | ||
| vendor=get_vendor_from_opts(opts), | ||
| body=analysis, | ||
|
Comment on lines
169
to
173
|
||
| encoding="none", | ||
| extra={ | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The `get_vendor_from_opts` docstring enumerates return values, but `_infer_vendor_from_model_name()` can also return values like `"groq"` and `"bedrock"` (via `prefix_map`). Please update the docstring to match the actual possible return values, or avoid listing a closed set if this is intended to be extensible.