-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchatbot.py
More file actions
117 lines (96 loc) · 3.68 KB
/
chatbot.py
File metadata and controls
117 lines (96 loc) · 3.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
# chatbot.py
# Console RAG chatbot: answers space/astronomy questions by retrieving context
# from a local FAISS index and generating replies with the Groq LLM API.
import os
from dotenv import load_dotenv
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from groq import Groq
import requests
# Load environment variables (expects GROQ_API_KEY and NASA_API_KEY in .env)
load_dotenv()
GROQ_API_KEY = os.getenv("GROQ_API_KEY")  # None if missing — Groq calls will then fail at request time
NASA_API_KEY = os.getenv("NASA_API_KEY")  # used by get_nasa_apod()
# Initialize Groq client (module-level singleton, reused for every chat turn)
client = Groq(api_key=GROQ_API_KEY)
# Load FAISS index built offline; the embedding model here must match the one
# used when the index was created, or similarity search results are meaningless.
print("🔄 Loading FAISS index...")
embeddings = HuggingFaceEmbeddings(
model_name="sentence-transformers/all-MiniLM-L6-v2"
)
vectorstore = FAISS.load_local(
"embeddings/faiss_index",
embeddings,
# NOTE(security): this opts into unpickling the index file — only safe
# because we load an index we built ourselves, never untrusted input.
allow_dangerous_deserialization=True
)
print("✅ FAISS index loaded!\n")
def get_nasa_apod():
    """Fetch today's Astronomy Picture of the Day from NASA's APOD API.

    Returns:
        dict with 'title', 'url', and 'explanation' keys (values may be
        None if absent from the API payload), or None on any network,
        HTTP, or JSON-parse failure. Best-effort by design: the caller
        in main() prints a fallback message when this returns None.
    """
    url = f"https://api.nasa.gov/planetary/apod?api_key={NASA_API_KEY}"
    try:
        # Timeout so a stalled NASA endpoint can't hang the chat loop forever.
        response = requests.get(url, timeout=10)
        # Fail fast on 4xx/5xx instead of trying to parse an error page.
        response.raise_for_status()
        data = response.json()
    except (requests.RequestException, ValueError):
        # RequestException: connection/timeout/HTTP errors.
        # ValueError: body was not valid JSON.
        # Previously a bare `except:` which also swallowed KeyboardInterrupt.
        return None
    return {
        'title': data.get('title'),
        'url': data.get('url'),
        'explanation': data.get('explanation'),
    }
def search_knowledge_base(query, k=3):
    """Return the text of the top-k FAISS matches for *query*, joined by blank lines."""
    matches = vectorstore.similarity_search(query, k=k)
    return "\n\n".join(doc.page_content for doc in matches)
def generate_response(user_question, context):
    """Ask the Groq LLM to answer *user_question*, grounded in *context*.

    The knowledge-base context is injected into the user turn; the system
    turn instructs the model to fall back to general knowledge (and say so)
    when the context is not relevant. Returns the model's reply text.
    """
    system_prompt = """You are a helpful space and astronomy assistant.
Use the provided context to answer questions about space, NASA, astronomy, and related topics.
If the context doesn't contain relevant information, use your general knowledge but mention that.
Be conversational, enthusiastic, and educational."""
    user_prompt = f"""Context from knowledge base:
{context}
User question: {user_question}
Please provide a helpful and engaging answer."""
    conversation = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    # llama-3.1-70b-versatile: fast and high quality for conversational use.
    completion = client.chat.completions.create(
        messages=conversation,
        model="llama-3.1-70b-versatile",
        temperature=0.7,
        max_tokens=500,
    )
    return completion.choices[0].message.content
def main():
    """Run the interactive chat loop.

    Commands: 'apod' shows today's Astronomy Picture of the Day;
    'quit'/'exit'/'bye' ends the session; anything else is answered via
    RAG (knowledge-base search + Groq generation). Blank input is ignored.
    """
    print("🚀 Space Chatbot with RAG + NASA API")
    print("=" * 50)
    print("Ask me anything about space, astronomy, or NASA!")
    print("Type 'apod' to see today's Astronomy Picture of the Day")
    print("Type 'quit' to exit\n")
    while True:
        question = input("You: ").strip()
        if not question:
            continue
        command = question.lower()
        if command in ('quit', 'exit', 'bye'):
            print("👋 Thanks for chatting! Keep exploring space!")
            break
        if command == 'apod':
            print("\n🔄 Fetching today's APOD...")
            picture = get_nasa_apod()
            if picture is None:
                print("❌ Couldn't fetch APOD")
            else:
                print(f"\n📸 {picture['title']}")
                print(f"🔗 {picture['url']}")
                # Explanations can be long; show only the first 300 chars.
                print(f"\n{picture['explanation'][:300]}...")
            print()
            continue
        # Default path: retrieve context, then generate a grounded answer.
        print("\n🔍 Searching knowledge base...")
        kb_context = search_knowledge_base(question)
        print("🤖 Generating response...\n")
        answer = generate_response(question, kb_context)
        print(f"Bot: {answer}\n")
        print("-" * 50 + "\n")


if __name__ == "__main__":
    main()