diff --git a/main.py b/main.py
index 743fe02..6d74545 100644
--- a/main.py
+++ b/main.py
@@ -12,13 +12,10 @@
from langchain.prompts import PromptTemplate
import re
-# Ensure that the OPENAI_API_KEY is set
os.environ['OPENAI_API_KEY'] = os.getenv("OPENAI_API_KEY")
-# Set page config
st.set_page_config(page_title="📰 ResearchMate", page_icon="📰", layout="wide")
-# Apply custom CSS for light and dark theme
st.markdown("""
""", unsafe_allow_html=True)
-# Main title with animation
st.markdown("""
📰 ResearchMate
@@ -173,11 +169,9 @@
""", unsafe_allow_html=True)
-# Sidebar
with st.sidebar:
st.title("📄 Upload Content")
-# Initialize session state
if 'urls' not in st.session_state:
st.session_state.urls = ['']
if 'pdf_files' not in st.session_state:
@@ -187,11 +181,9 @@
if 'chat_history' not in st.session_state:
st.session_state.chat_history = []
-# Function to add a new URL input
def add_url():
st.session_state.urls.append('')
-# Display URL input fields dynamically
with st.sidebar.expander("Add URLs", expanded=True):
for i in range(len(st.session_state.urls)):
st.session_state.urls[i] = st.text_input(f"URL {i+1}", value=st.session_state.urls[i], key=f"url_{i}")
@@ -199,7 +191,6 @@ def add_url():
if st.sidebar.button("➕ Add another URL"):
add_url()
-# PDF file uploader
uploaded_files = st.sidebar.file_uploader("Upload PDF files", type=['pdf'], accept_multiple_files=True)
if uploaded_files:
st.session_state.pdf_files = uploaded_files
@@ -225,7 +216,6 @@ def preprocess_text(text):
if process_content_clicked:
documents = []
- # Process URLs
valid_urls = [url for url in st.session_state.urls if url.strip()]
if valid_urls:
main_placeholder.text("Processing URLs...")
@@ -268,7 +258,6 @@ def preprocess_text(text):
else:
st.warning("No content to process. Please add URLs or upload PDF files.")
-# Custom prompt template
template = """You are an AI assistant tasked with answering questions based on the given context.
Use the information provided in the context to answer the question concisely and avoid repetition.
I am saying this strictly that If the answer cannot be found in the context, simply state that you don't have enough information to answer accurately.
@@ -282,7 +271,6 @@ def preprocess_text(text):
input_variables=["context", "question"]
)
-# Create the RetrievalQA chain
if 'qa' not in st.session_state and st.session_state.vectorstore is not None:
st.session_state.qa = RetrievalQA.from_chain_type(
llm=chat_model,
@@ -292,7 +280,6 @@ def preprocess_text(text):
return_source_documents=True
)
-# Display chat history
for message in st.session_state.chat_history:
message_class = "user-message" if message["role"] == "user" else "bot-message"
with st.container():
@@ -302,7 +289,6 @@ def preprocess_text(text):
""", unsafe_allow_html=True)
-# Chat input
if prompt := st.chat_input("💬 What would you like to know?"):
st.session_state.chat_history.append({"role": "user", "content": prompt})
with st.container():
@@ -349,7 +335,6 @@ def preprocess_text(text):
st.session_state.chat_history.append({"role": "assistant", "content": full_response})
-# Add buttons to clear chat history and processed content
st.sidebar.write("## Manage Session")
if st.sidebar.button("🧹 Clear Chat History"):
st.session_state.chat_history = []