Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ static
!static/styles.css
config.json
config.yml
litellm_config.yaml

.data
tmp
Expand Down
69 changes: 69 additions & 0 deletions server/lib/openai_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,75 @@ def get_openai_client(opts=None):
)


def get_vendor_from_opts(opts=None):
    """
    Determine the AI vendor string from opts.

    When using LiteLLM, tries to infer the actual provider from the model
    name in opts (e.g. "claude-*" -> "anthropic"). Falls back to "litellm"
    if the model name doesn't match a known pattern.

    Returns "azure" for Azure OpenAI credentials, "openai" for direct
    OpenAI credentials, or an inferred provider name when using LiteLLM.
    Inferred LiteLLM provider names are extensible and may include values
    such as "anthropic", "google", "mistral", "meta", "cohere", "groq",
    or "bedrock"; if no provider can be inferred, returns "litellm".
    """
    opts = opts or {}
    # LiteLLM proxy is in use only when BOTH the URL and the master key are
    # configured (blank/whitespace-only values are treated as absent).
    litellm_url = (opts.get("LITELLM_PROXY_URL") or "").strip()
    litellm_key = (opts.get("LITELLM_MASTER_KEY") or "").strip()
    if litellm_url and litellm_key:
        model = opts.get("model") or ""
        inferred = _infer_vendor_from_model_name(model)
        return inferred if inferred else "litellm"

    # Azure OpenAI likewise requires both endpoint and API key.
    azure_endpoint = (opts.get("AZURE_OPENAI_ENDPOINT") or "").strip()
    azure_api_key = (opts.get("AZURE_OPENAI_API_KEY") or "").strip()
    if azure_endpoint and azure_api_key:
        return "azure"

    # Default: direct OpenAI credentials.
    return "openai"
Comment on lines +89 to +113
Copy link

Copilot AI Apr 7, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New vendor-resolution logic is being introduced here, but there are no unit tests covering get_vendor_from_opts() / _infer_vendor_from_model_name() across the documented scenarios (direct OpenAI vs Azure vs LiteLLM w/ prefixed + bare model names). Adding focused tests would help prevent regressions and ensure the vendor stored in vCon analysis matches configuration.

Copilot uses AI. Check for mistakes.


def _infer_vendor_from_model_name(model_name):
"""Infer vendor from a model name string. Returns None if unknown.

Handles LiteLLM provider-prefixed names (e.g. "azure/gpt-4o",
"anthropic/claude-3") as well as bare model names (e.g. "gpt-4o-mini").
"""
if not model_name:
return None
parts = model_name.lower().split("/")
# If a provider prefix is present, use it directly
if len(parts) > 1:
prefix_map = {
"openai": "openai",
"azure": "azure",
"anthropic": "anthropic",
"google": "google",
"vertex_ai": "google",
"mistral": "mistral",
"meta": "meta",
"cohere": "cohere",
"groq": "groq",
"bedrock": "bedrock",
}
if parts[0] in prefix_map:
return prefix_map[parts[0]]
# Fall back to model name pattern matching on the last segment
m = parts[-1]
if m.startswith("claude"):
return "anthropic"
if m.startswith("gpt-") or m.startswith("o1") or m.startswith("o3") or m.startswith("chatgpt"):
return "openai"
Comment on lines +124 to +146
Copy link

Copilot AI Apr 7, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

_infer_vendor_from_model_name() says "If a provider prefix is present, use it directly", but if the prefix is not in prefix_map the function falls back to pattern matching on the last segment (e.g. custom/gpt-4o would currently infer openai). If an explicit provider prefix is present but unknown, it seems more accurate to treat it as unknown (return None so the caller can fall back to "litellm"), or return the raw prefix string and document that behavior.

Copilot uses AI. Check for mistakes.
if m.startswith("gemini"):
return "google"
if m.startswith("mistral") or m.startswith("mixtral"):
return "mistral"
if m.startswith("llama") or m.startswith("meta-llama"):
return "meta"
if m.startswith("command"):
return "cohere"
return None


def get_async_openai_client(opts=None):
"""
Return an async OpenAI-compatible client. Same opts semantics as get_openai_client.
Expand Down
4 changes: 2 additions & 2 deletions server/links/analyze/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from lib.vcon_redis import VconRedis
from lib.logging_utils import init_logger
from lib.openai_client import get_openai_client
from lib.openai_client import get_openai_client, get_vendor_from_opts
import logging
from tenacity import (
retry,
Expand Down Expand Up @@ -169,7 +169,7 @@ def run(
vCon.add_analysis(
type=opts["analysis_type"],
dialog=index,
vendor="openai",
vendor=get_vendor_from_opts(opts),
body=analysis,
Comment on lines 169 to 173
Copy link

Copilot AI Apr 7, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Now that vendor can be non-openai (Azure/LiteLLM/Anthropic/etc), the metrics emitted in this link still use the conserver.link.openai.* names. Consider renaming these metrics to a provider-agnostic namespace (or adding a vendor attribute) so dashboards/alerts remain accurate when using non-OpenAI backends.

Copilot uses AI. Check for mistakes.
encoding="none",
extra={
Expand Down
Loading