app.py
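# A minimal RAG (Retrieval-Augmented Generation) app: it reads a local PDF,
# chunks and embeds the text into a FAISS index, and answers questions about
# the document with a Groq-hosted Gemma 2 model, all behind a Streamlit UI.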
import streamlit as st
import PyPDF2
from langchain.schema import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.prompts import PromptTemplate
from langchain.chat_models import init_chat_model
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

st.header('A RAG App')

groqapi = ''  # set your Groq API key here (left blank, as in the original)

uploaded_file = r'C:\Users\ASHWINI\Downloads\Cheenai_LTT.pdf'
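# Note: the hard-coded path above could be swapped for a real upload widget.
# A sketch (not part of the original code); PyPDF2.PdfReader accepts
# file-like objects as well as paths:
#   uploaded_file = st.file_uploader('Upload a PDF', type='pdf')
#   if uploaded_file is None:
#       st.stop()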

# Read the PDF with PyPDF2 and collect the text of every page
text = ''
pdf_reader = PyPDF2.PdfReader(uploaded_file)
for page in pdf_reader.pages:
    text += page.extract_text() + '\n'

# Split the text into overlapping chunks (strings)
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
text_chunks = splitter.split_text(text)

# Wrap each chunk in LangChain's Document format
docs = [Document(page_content=chunk) for chunk in text_chunks]
st.subheader('Document split successfully')

# Create the embedding model
embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')

# Build a FAISS vector store over the document chunks
vectordb = FAISS.from_documents(docs, embeddings)
st.success("FAISS VectorStore created successfully")
retriever = vectordb.as_retriever()
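# Note (an aside, not in the original): as_retriever() defaults to plain
# similarity search; pass search_kwargs={'k': 4} to control how many chunks
# are returned per query.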

# Initialize the chat model (Gemma 2 9B instruction-tuned, served by Groq)
model = init_chat_model(model='gemma2-9b-it', model_provider='groq', api_key=groqapi)

template = """
You are a helpful assistant. Answer the question using only the context below.
If the answer is not present, just say no. Do not try to make up an answer.
Context:
{context}
Question:
{question}
Helpful Answer:
"""
rag_prompt = PromptTemplate(input_variables=['context', 'question'], template=template)

user_query = st.text_input('Ask a question about the PDF')
if user_query:
    # Retrieve the most relevant chunks and pass their text (not the raw
    # Document objects) into the prompt
    relevant_docs = retriever.invoke(user_query)
    context = '\n\n'.join(doc.page_content for doc in relevant_docs)
    final_prompt = rag_prompt.format(context=context, question=user_query)
    with st.spinner('Generating answer...'):
        response = model.invoke(final_prompt)
    st.write('### Answer')
    st.write(response.content)
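
# Run the app with: streamlit run app.py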