-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchatbot_app.py
More file actions
52 lines (41 loc) · 1.77 KB
/
chatbot_app.py
File metadata and controls
52 lines (41 loc) · 1.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import streamlit as st
from chatbot.llm_utils import get_llm_response, get_decision_from_llm, parse_llm_decision, run_sql, format_sql_results
import sqlite3
# --- Database settings ---
DB_PATH = "ocr_results.db"

# --- Connect to SQLite ---
# check_same_thread=False: Streamlit may service reruns from different threads,
# so the default same-thread guard must be relaxed. The connection is opened
# once at module import and lives for the app process lifetime.
conn = sqlite3.connect(DB_PATH, check_same_thread=False)
cursor = conn.cursor()

# --- Streamlit App ---
st.title("EcoStep Chatbot")

# Initialize the per-session conversation history exactly once; Streamlit
# re-executes this script on every interaction, so guard against resetting it.
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
# Replay the stored conversation so the chat survives Streamlit reruns.
# Each entry is a dict with 'role' ("user"/"assistant") and 'content' (markdown).
for msg in st.session_state['messages']:
    with st.chat_message(msg['role']):
        st.markdown(msg['content'])
# --- User input ---
# chat_input returns None until the user submits, so the walrus gates the
# whole turn on a non-empty prompt.
if prompt := st.chat_input("Ask me anything..."):
    # Record and echo the user's message.
    st.session_state['messages'].append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            # Ask the LLM to decide how to handle the prompt: either a SQL
            # lookup against the OCR results DB, or a direct text answer.
            llm_response = get_decision_from_llm(prompt)
            mode, payload = parse_llm_decision(llm_response)

            if mode == "SQL":
                try:
                    # Run the generated SQL, but never show the query itself
                    # to the user.
                    sql_results = run_sql(payload, cursor)
                    # Let the LLM turn the raw rows into a readable answer.
                    content = format_sql_results(prompt, sql_results)
                except Exception:
                    # Fail soft: any DB/LLM error becomes a friendly message
                    # instead of a traceback in the chat. (Plain string — the
                    # original had a useless f-prefix with no placeholders.)
                    content = "I'm having trouble accessing that information right now."
            else:
                # Non-SQL mode: the payload already is the final answer text.
                content = payload

        # Display and store the assistant response.
        st.markdown(content)
    st.session_state['messages'].append({"role": "assistant", "content": content})