-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtranslate_model.py
More file actions
36 lines (32 loc) · 1.21 KB
/
translate_model.py
File metadata and controls
36 lines (32 loc) · 1.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import os
import langchain_community
from logger import logger
# Lazily-initialized cache for the LangChain Ollama LLM instance
# (populated on first call of the langchain-based traslate_text).
llm = None
# Lazily-initialized cache for the ollama Client instance.
# NOTE(review): "clinet" is a typo for "client" — kept because the
# function below references this exact name.
clinet = None
def traslate_text(text):
    """Translate an English primary-school textbook sentence to Chinese
    using a LangChain Ollama LLM, caching the LLM in the module global.

    NOTE(review): this definition is shadowed by a later function of the
    same name in this file, so it is unreachable via the module namespace;
    the name also looks like a typo for "translate_text" — both kept
    unchanged for backward compatibility.

    Args:
        text: English sentence to translate.

    Returns:
        The model's raw response string.
    """
    from langchain_community.llms import Ollama
    global llm
    if not llm:
        # BUG FIX: the original constructed the Ollama instance without
        # assigning it, leaving `llm` as None so `llm.invoke` below
        # raised AttributeError on every call.
        llm = Ollama(base_url="http://192.168.1.2:11434", model="qwen3:8b")
    # Generate text from the model
    response = llm.invoke("以下是小学一年级课本中的教学课文的英文语句,请将这句英文翻译成中文{}, 请通顺翻译,并且只需要翻译结果,不需要额外解释。".format(text))
    logger.info(response)
    # Return the result for consistency with the other translator in this file.
    return response
def traslate_text(text):
    """Translate an English sentence to Chinese via the ollama Client API,
    caching the client in the module-level `clinet` global.

    NOTE(review): this redefinition silently shadows the langchain-based
    `traslate_text` defined earlier in this file; the name also looks like
    a typo for "translate_text" — kept for backward compatibility.

    Args:
        text: English sentence to translate.

    Returns:
        The translated Chinese text extracted from the chat response.
    """
    # NOTE: "clinet" is a typo for "client"; kept to match the module global.
    global clinet
    if not clinet:
        from ollama import Client
        # BUG FIX: the original assigned to a *local* name `client`, so the
        # global cache was never populated (a new client per call) and, had
        # `clinet` ever been truthy, `client` would have been an undefined
        # name at the .chat() call below.
        clinet = Client(host='http://192.168.1.2:11434')
    response = clinet.chat(model='qwen3:8b', messages=[
        {
            'role': 'user',
            'content': "请将这句英文翻译成中文{}。直接给出翻译结果,不需要多余的开场白和解释。".format(text),
        },
    ])
    ctx = response["message"]["content"]
    logger.info("traslate_text:{}".format(ctx))
    return ctx
if __name__ == "__main__":
    # Manual smoke test: translate a sample English sentence.
    sample = "I love you"
    traslate_text(sample)