-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathapp.py
67 lines (51 loc) · 2.53 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
from os import getenv, makedirs, path

import streamlit as st
from dotenv import load_dotenv
from langchain.llms import OpenAI

from dataloader import DataLoader
class App:
    """A Streamlit application for PaperParrot.

    Constructing the class renders the whole page as a side effect: a
    shared-token gate, a sidebar, a PDF uploader that feeds a LangChain
    agent, and a free-text question box answered by that agent.
    """

    def __init__(self):
        """Initialize the application and render the Streamlit UI."""
        load_dotenv()
        self.uploaded_file = None       # streamlit UploadedFile from the uploader widget
        self.uploaded_file_path = None  # on-disk copy of the uploaded PDF
        self.store = None               # vector store used for similarity search
        self.agent_executor = None      # LangChain agent that answers prompts

        # Simple shared-secret gate: halt rendering until the token matches.
        self.shared_token = st.text_input("Enter shared token", type="password")
        if self.shared_token.strip() != getenv('SHARING_TOKEN'):
            st.stop()

        # Add sidebar with description
        st.sidebar.markdown("## PaperParrot")
        st.sidebar.markdown("A POC for processing PDF using LLM")
        st.title('PaperParrot')

        self.llm = OpenAI(temperature=0.1, verbose=True, api_key=getenv('OPENAI_API_KEY'))

        self.uploaded_file = st.file_uploader("Choose a PDF file", type="pdf")
        if self.uploaded_file is not None:
            self.save_file_to_uploads_dir()
            self.train_model()

        if prompt := st.text_input('Fire away with your follow-up questions!'):
            self.handle_prompt(prompt)

    def train_model(self):
        """Train the model with the uploaded PDF."""
        st.info("Hold on tight! We're loading the PDF and training ChatGPT")
        progress = st.progress(0)
        vector_db_manager = DataLoader(self.llm)
        # process_pdf returns (vector store, agent executor) for the PDF on disk.
        self.store, self.agent_executor = vector_db_manager.process_pdf(self.uploaded_file_path)
        # Update progress bar to 100%
        progress.progress(100)
        st.success('PDF is loaded and our model is trained. Get ready to ask your side-splitting questions.')

    def save_file_to_uploads_dir(self):
        """Write the mutable uploaded file object to disk"""
        # BUG FIX: getenv returns None when UPLOAD_DIRECTORY is unset, which
        # crashed path.join with a TypeError; fall back to a local "uploads"
        # directory and make sure it exists before opening the file for write.
        upload_dir = getenv('UPLOAD_DIRECTORY') or 'uploads'
        makedirs(upload_dir, exist_ok=True)
        self.uploaded_file_path = path.join(upload_dir, self.uploaded_file.name)
        with open(self.uploaded_file_path, 'wb') as f:
            f.write(self.uploaded_file.getvalue())

    def handle_prompt(self, prompt):
        """Handle the user prompt."""
        # BUG FIX: the question box renders even before any PDF is uploaded,
        # so store/agent_executor may still be None — warn instead of crashing
        # with an AttributeError.
        if self.agent_executor is None or self.store is None:
            st.warning('Please upload a PDF first so there is something to ask about.')
            return
        st.write(self.agent_executor.run(prompt))  # Then pass the prompt to the LLM and write it out to the screen
        # With a streamlit expander
        with st.expander('Related Sections for Above Information'):
            # Find the relevant pages and Write out the first
            st.write(self.store.similarity_search_with_score(prompt)[0][0].page_content)
if __name__ == "__main__":
    # Instantiating App renders the entire Streamlit page as a side effect
    # of __init__; no further calls are needed.
    App()