Skip to content

Commit 508bad7

Browse files
committed
we don't need preferred models honestly, it all lives in the litellm package anyway
1 parent 4e06f55 commit 508bad7

1 file changed

Lines changed: 3 additions & 17 deletions

File tree

agentstack/providers.py

Lines changed: 3 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -14,28 +14,12 @@
1414
'ollama': 'ollama',
1515
}
1616

17-
# Fallback models if litellm.model_cost is empty or fails
18-
# Perhaps useful for an offline mode for people with tons of storage and no internet
19-
# Think: Government "quiet" rooms etc.
20-
PREFERRED_MODELS = [
21-
'groq/deepseek-r1-distill-llama-70b',
22-
'deepseek/deepseek-chat',
23-
'deepseek/deepseek-coder',
24-
'deepseek/deepseek-reasoner',
25-
'openai/gpt-4o',
26-
'anthropic/claude-3-5-sonnet', # this has the wrong name, fixed on my other branch.
27-
'openai/o1-preview',
28-
'openai/gpt-4-turbo',
29-
'anthropic/claude-3-opus',
30-
]
31-
3217

3318
@lru_cache(maxsize=1)
3419
def get_available_models() -> List[str]:
3520
"""
3621
Get list of available models in provider/model format.
3722
Results are cached to avoid processing multiple times.
38-
Falls back to PREFERRED_MODELS if fetching fails.
3923
"""
4024
models = []
4125

@@ -46,7 +30,9 @@ def get_available_models() -> List[str]:
4630
models.append(f"{provider}/{model}")
4731

4832
except Exception:
49-
models = PREFERRED_MODELS.copy()
33+
# since the models exist in the package, this should only throw
34+
# in the case of a breaking change or bug in litellm.
35+
raise Exception("Failed to parse models from litellm.")
5036

5137
return sorted(models)
5238

0 commit comments

Comments
 (0)