@@ -50,7 +50,13 @@ def create_llm_service(llm_config: Dict[str, Any]):
5050 logger .info (f'Creating LLM service: { llm_type } / { model } ' )
5151
5252 if llm_type == 'openai' :
53- return LLMServiceFactory ._create_openai_llm (api_key , model , parameters )
53+ base_url = llm_config .get ('base_url' )
54+ if not base_url or base_url == 'https://api.openai.com/v1' :
55+ return LLMServiceFactory ._create_openai_llm (api_key , model , parameters )
56+ else :
57+ return LLMServiceFactory ._create_openai_compatible_llm (
58+ api_key , model , parameters , base_url
59+ )
5460 elif llm_type == 'azure_openai' :
5561 base_url = llm_config .get ('base_url' )
5662 if not base_url :
@@ -97,6 +103,36 @@ def _create_openai_llm(api_key: str, model: str, parameters: Dict[str, Any]):
97103
98104 return OpenAILLMService (api_key = api_key , model = model , params = input_params )
99105
@staticmethod
def _create_openai_compatible_llm(
    api_key: str, model: str, parameters: Dict[str, Any], base_url: str
):
    """Create a BaseOpenAILLMService for an OpenAI-compatible endpoint.

    Used for third-party services that speak the OpenAI API but live at a
    custom ``base_url`` (as opposed to the official OpenAI endpoint, which
    goes through ``_create_openai_llm``).

    Args:
        api_key: API key for the target endpoint.
        model: Model identifier to request.
        parameters: Optional generation settings. Only the recognized
            sampling keys below are forwarded; anything else is ignored.
        base_url: Root URL of the OpenAI-compatible API.

    Returns:
        A configured ``BaseOpenAILLMService`` instance.
    """
    # Forward only the sampling parameters InputParams understands;
    # keys that are absent fall back to the service defaults.
    recognized_keys = (
        'temperature',
        'max_completion_tokens',
        'top_p',
        'frequency_penalty',
        'presence_penalty',
        'seed',
    )
    params_dict = {
        key: parameters[key] for key in recognized_keys if key in parameters
    }

    input_params = BaseOpenAILLMService.InputParams(**params_dict)

    logger.info(
        f"OpenAI-compatible LLM config: model={model}, base_url={base_url}, "
        f"temp={params_dict.get('temperature', 'default')}"
    )

    return BaseOpenAILLMService(
        api_key=api_key, model=model, base_url=base_url, params=input_params
    )
135+
100136 @staticmethod
101137 def _create_google_llm (api_key : str , model : str , parameters : Dict [str , Any ]):
102138 """Create Google LLM service"""
0 commit comments