-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathChatbot.py
More file actions
98 lines (82 loc) · 3.01 KB
/
Chatbot.py
File metadata and controls
98 lines (82 loc) · 3.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
import ollama
import sys
# Conversation history shared by the whole module; every call to the model
# sends this full list so the assistant keeps context across turns.
# Seeded with one system message that fixes the assistant's persona.
# Fix: "Richard Fieynman" -> "Richard Feynman" (misspelled name was being
# sent to the model verbatim), and "a sharpest" -> "the sharpest".
messages = [
    {
        "role": "system",
        "content": (
            "You are the sharpest teacher, like Richard Feynman but without "
            "any sarcasm. Explain concepts in simple terms with examples. "
            "If you don't know the answer, admit it honestly."
        ),
    }
]
def _extract_assistant_text(resp) -> str:
"""Robustly extract assistant text from different response shapes returned by ollama.
Handles objects with attributes (`response`, `message`) and dict-like returns.
Falls back to string representations if structured content isn't found.
"""
# Prefer a top-level 'response' attribute
try:
if hasattr(resp, "response") and resp.response:
return resp.response
except Exception:
pass
# If there's a 'message' attribute, try to pull content from it.
try:
if hasattr(resp, "message"):
msg = resp.message
if isinstance(msg, dict):
return msg.get("content") or msg.get("text") or str(msg)
# object-like message
content = getattr(msg, "content", None)
if content:
return content
# try dict-style access
try:
return msg["content"]
except Exception:
return str(msg)
except Exception:
pass
# Try pydantic/model dump or dict
try:
data = None
if hasattr(resp, "model_dump"):
data = resp.model_dump()
elif hasattr(resp, "dict"):
data = resp.dict()
if isinstance(data, dict):
if "response" in data and data["response"]:
return data["response"]
if "message" in data:
m = data["message"]
if isinstance(m, dict):
return m.get("content") or m.get("text") or str(m)
return str(m)
except Exception:
pass
# Final fallback
try:
return str(resp)
except Exception:
return ""
def _chat_loop():
    """Run an interactive chat REPL against a local Ollama model.

    Reads lines from stdin, forwards the running conversation to the
    model, prints each reply, and records both sides of the exchange in
    the module-level ``messages`` history so context accumulates.
    Typing "exit"/"quit" or pressing Ctrl-C ends the session.
    """
    try:
        while True:
            prompt = input("You: ")
            if prompt.lower() in ("exit", "quit"):
                print("Exiting the chatbot.")
                break
            messages.append({"role": "user", "content": prompt})
            try:
                # Send only the conversation history to `ollama.chat()`;
                # no images or extra payload.
                reply = ollama.chat(model="ministral-3:3b", messages=messages)
            except Exception as err:
                # Surface the failure but keep the loop alive so the user
                # can simply try again.
                print("Error during chat call:", err)
                continue
            answer = _extract_assistant_text(reply) or ""
            print("Jarvis:", answer)
            # Record the assistant's turn so the model sees it next time.
            messages.append({"role": "assistant", "content": answer})
            print("\n")
    except KeyboardInterrupt:
        print("\nChat interrupted. Exiting.")
# Entry point: start the interactive chat loop only when this file is
# executed as a script, not when it is imported as a module.
if __name__ == "__main__":
    _chat_loop()