-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathapp.py
More file actions
112 lines (92 loc) · 3.77 KB
/
app.py
File metadata and controls
112 lines (92 loc) · 3.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import streamlit as st
import openai
from typing import List, Dict
from utils.models import get_local_models
from utils.prompts import LANGUAGE_PROMPTS
from components.sidebar import render_sidebar
# Ollama exposes an OpenAI-compatible endpoint, so the stock OpenAI SDK
# works against it. The API key is mandatory in the SDK but ignored by
# Ollama, so any placeholder value will do.
client = openai.OpenAI(
    base_url="http://localhost:11434/v1",
    api_key="ollama",
)
def init_session_state():
    """Seed session-state keys used by the app if they are not yet present."""
    defaults = {
        "messages": [],   # chat history: list of {"role", "content"} dicts
        "settings": None, # sidebar settings, populated on first render
    }
    for key, value in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = value
def display_chat_history():
    """Re-render every stored chat message, in order, as chat bubbles."""
    for entry in st.session_state.messages:
        role = entry["role"]
        content = entry["content"]
        with st.chat_message(role):
            st.markdown(content)
def get_system_prompt(settings: dict) -> str:
    """Build the system prompt, optionally tailored to a language.

    Args:
        settings: Sidebar settings dict. The optional 'language' key selects
            a language-specific fragment from LANGUAGE_PROMPTS.

    Returns:
        The complete system prompt string sent with every request.
    """
    # Use .get() on settings as well as on LANGUAGE_PROMPTS: a missing
    # 'language' key now degrades to the generic prompt instead of raising
    # KeyError (the original indexed settings['language'] directly).
    language_prompt = LANGUAGE_PROMPTS.get(settings.get("language"), "")
    return f"""You are a helpful coding assistant. {language_prompt}
Provide clear, well-commented code examples.
Format code blocks with proper markdown syntax using triple backticks."""
def main():
    """Entry point: configure the page, render the UI, and run the chat loop."""
    st.set_page_config(
        page_title="Maux Local AI Code Assistant",
        page_icon="🤖",
        layout="wide",
    )

    _render_header()
    init_session_state()

    # Sidebar yields the user's model and generation settings.
    models = get_local_models()
    settings = render_sidebar(models)
    st.session_state.settings = settings

    display_chat_history()

    if prompt := st.chat_input("Ask your coding question..."):
        if not settings['model']:
            st.error("Please select a model first!")
            st.stop()

        # Record and echo the user's message before calling the model.
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            full_response = _stream_assistant_reply(settings)

        # Persist the reply so it survives Streamlit's next rerun.
        st.session_state.messages.append(
            {"role": "assistant", "content": full_response}
        )


def _render_header():
    """Render the title row with Maux branding and the hosted-version link."""
    col1, col2 = st.columns([3, 1])
    with col1:
        st.title("🤖 Maux Local AI Code Assistant")
    with col2:
        st.markdown("""
        <div style='text-align: right; padding-top: 20px;'>
            <a href='https://ai.maux.space' target='_blank' style='color: #FF4B4B;'>
                Try our hosted version ↗
            </a>
        </div>
        """, unsafe_allow_html=True)


def _stream_assistant_reply(settings: dict) -> str:
    """Stream a completion from the local model, rendering it incrementally.

    Args:
        settings: Sidebar settings (model, context_window, temperature,
            max_tokens, top_p).

    Returns:
        The full assistant reply text (empty string if the stream produced
        nothing). Calls st.stop() after surfacing any API error.
    """
    message_placeholder = st.empty()
    full_response = ""
    try:
        # Limit how much history is sent to the model. Guard the zero case
        # explicitly: messages[-0:] is messages[0:], i.e. the ENTIRE history,
        # which is the opposite of "no context".
        window = settings['context_window']
        context_messages = (
            st.session_state.messages[-window:] if window > 0 else []
        )

        stream = client.chat.completions.create(
            model=settings['model'],
            messages=[
                {"role": "system", "content": get_system_prompt(settings)},
                *[{"role": m["role"], "content": m["content"]}
                  for m in context_messages],
            ],
            stream=True,
            temperature=settings['temperature'],
            max_tokens=settings['max_tokens'],
            top_p=settings['top_p'],
        )

        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                full_response += chunk.choices[0].delta.content
                # Trailing block cursor signals "still generating".
                message_placeholder.markdown(full_response + "▌")
        message_placeholder.markdown(full_response)
    except Exception as e:
        # Surface connection/model errors (e.g. Ollama not running) in the UI.
        st.error(f"Error: {str(e)}")
        st.stop()
    return full_response
# Script entry point (launched via `streamlit run app.py`).
if __name__ == "__main__":
    main()