-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbot.py
More file actions
123 lines (98 loc) · 4.55 KB
/
bot.py
File metadata and controls
123 lines (98 loc) · 4.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import logging
from logging.handlers import RotatingFileHandler
from telegram import Update
from telegram.ext import Application, MessageHandler, filters, ContextTypes
from dotenv import load_dotenv
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain.chains import ConversationChain
from langchain.memory import ConversationSummaryBufferMemory
from langchain_core.messages import SystemMessage
from langchain_core.prompts.chat import (
ChatPromptTemplate,
HumanMessagePromptTemplate,
MessagesPlaceholder,
)
# Load environment variables (e.g. BOT_TOKEN, HuggingFace credentials) from a
# .env file, overriding any variables already set in the process environment.
load_dotenv(override=True)

# Global configuration.
# NOTE(review): LOG_LEVEL and MAX_LOG_SIZE appear unused in this file — the
# per-user handler below hardcodes INFO and a 2 MB rotation size; confirm and
# either wire these in or remove them.
LOG_LEVEL = 'INFO'
MAX_LOG_SIZE = 5 * 1024 * 1024  # 5MB

# Per-user conversation memory, keyed by Telegram user id.
# NOTE(review): grows without bound for the process lifetime — one memory
# object is kept per user that ever messages the bot.
user_memories = {}
###########################################################################################################################
# LLM Model Setup
# One shared HuggingFace inference endpoint serves every user; the very low
# temperature makes replies close to deterministic.
repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
llm = HuggingFaceEndpoint(
    repo_id=repo_id,
    task="text-generation",
    max_new_tokens=4096,
    top_k=8,
    top_p=0.8,
    typical_p=0.8,
    temperature=0.001,
    repetition_penalty=1.20,)
# Chat wrapper that applies the model's chat template around the raw endpoint.
llm_engine_hf = ChatHuggingFace(llm=llm)
###########################################################################################################################
def setup_user_logging(user_id, user_name):
    """Create (or reuse) a rotating file logger for one user.

    Args:
        user_id: Unique user identifier; used in both the logger name and
            the log file name.
        user_name: Display name for the log file name. It originates from
            Telegram and is attacker-controlled, so it is sanitized before
            being used in a filesystem path.

    Returns:
        A ``logging.Logger`` named ``user_<user_id>`` that writes to
        ``user_logs/chatlog_<user_id>_<name>.log``, rotating at 2 MB with
        up to 5 backups.
    """
    logger = logging.getLogger(f"user_{user_id}")
    # getLogger returns the same object on every call; only attach a handler
    # the first time so repeated messages don't duplicate log lines.
    if not logger.handlers:
        logger.setLevel(logging.INFO)
        # Keep per-user chat transcripts out of the root logger's handlers.
        logger.propagate = False
        formatter = logging.Formatter("%(asctime)s - %(message)s")
        # exist_ok avoids the check-then-create race of the original
        # os.path.exists / os.makedirs pair.
        os.makedirs("user_logs", exist_ok=True)
        # user_name is untrusted input: replace path separators and any
        # character unsafe for a filename with '_'.
        safe_name = "".join(
            ch if ch.isalnum() or ch in "._-" else "_" for ch in str(user_name)
        )
        file_handler = RotatingFileHandler(
            f"user_logs/chatlog_{user_id}_{safe_name}.log",
            maxBytes=2 * 1024 * 1024,
            backupCount=5,
        )
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger
###########################################################################################################################
async def handle_message(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Handle one incoming text message from a user.

    Logs the message to the user's own log file, runs it through the LLM
    with that user's conversation memory, then replies in the chat.

    Side effects: appends to ``user_logs/``, mutates the global
    ``user_memories`` cache, and sends Telegram API calls.
    """
    user = update.effective_user
    user_message = update.message.text
    user_id = user.id
    # One log file per user so each chat transcript is isolated.
    user_logger = setup_user_logging(user_id, user.full_name)
    user_logger.info(f"User: {user_message}")
    try:
        # Show a "typing..." indicator while the model generates.
        await context.bot.send_chat_action(chat_id=update.effective_chat.id, action="typing")
        # Retrieve or lazily create the summarizing buffer memory for this
        # user. NOTE(review): entries are never evicted — consider a TTL or
        # size cap for long-running deployments.
        if user_id not in user_memories:
            memory = ConversationSummaryBufferMemory(llm=llm_engine_hf, max_token_limit=2048, return_messages=True)
            user_memories[user_id] = memory
        else:
            memory = user_memories[user_id]
        template_messages = [
            # Llama-3 chat-template tokens embedded directly in the system
            # prompt. Fixed typo: "concrectly" -> "concretely".
            SystemMessage(content="<|begin_of_text|><|start_header_id|>system<|end_header_id|> \
You are a helpful assistant that answers accurately and concretely only in the same language in which the user is talking<|eot_id|>"),
            MessagesPlaceholder(variable_name="history"),
            HumanMessagePromptTemplate.from_template("{input}"),
        ]
        prompt_template = ChatPromptTemplate.from_messages(template_messages)
        # Wire the shared LLM to this user's memory and the prompt above.
        conversation = ConversationChain(llm=llm_engine_hf, memory=memory, prompt=prompt_template)
        # Generate the reply (memory is updated by the chain itself).
        response = conversation.predict(input=user_message)
        user_logger.info(f"Bot: {response}")
        await update.message.reply_text(response)
    except Exception as e:
        # Top-level boundary: log the failure and apologize to the user
        # rather than letting one bad message crash the bot.
        user_logger.error(f"Error in handle_message: {str(e)}")
        await update.message.reply_text("Sorry, something went wrong. We're working to fix it! ")
###########################################################################################################################
def main():
    """Build the Telegram application and run long polling until stopped.

    Raises:
        RuntimeError: if the BOT_TOKEN environment variable is missing,
            so startup fails fast with a clear message instead of an
            opaque library error from ``token(None)``.
    """
    bot_token = os.getenv('BOT_TOKEN')
    if not bot_token:
        raise RuntimeError("BOT_TOKEN environment variable is not set")
    application = Application.builder().token(bot_token).build()
    # Route every text message to handle_message.
    # NOTE(review): filters.TEXT also matches /commands; use
    # filters.TEXT & ~filters.COMMAND if commands should be excluded.
    message_handler = MessageHandler(filters.TEXT, handle_message)
    application.add_handler(message_handler)
    application.run_polling(allowed_updates=Update.ALL_TYPES)


if __name__ == "__main__":
    main()