VishnuRamDebyez commited on
Commit
52136b3
·
verified ·
1 Parent(s): 5deb874

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +65 -65
app.py CHANGED
@@ -14,90 +14,90 @@ import time
14
# Load environment variables from a local .env file.
load_dotenv()

groq_api_key = os.getenv('groqapi')
google_api_key = os.getenv("GOOGLE_API_KEY")

# Fail fast with a single visible message when either key is absent.
if not (groq_api_key and google_api_key):
    st.error("API keys are missing. Please check your environment variables.")
    st.stop()

# Google client libraries pick the key up from the process environment.
os.environ["GOOGLE_API_KEY"] = google_api_key

st.title("Legal Assistant")

# Groq-hosted Llama 3 8B model used to answer questions.
llm = ChatGroq(groq_api_key=groq_api_key, model_name="Llama3-8b-8192")

# Prompt that restricts answers to the retrieved document context.
prompt = ChatPromptTemplate.from_template(
    """
    Answer the questions based on the provided context only.
    Please provide the most accurate response based on the question.
    <context>
    {context}
    <context>
    Questions: {input}
    """
)
41
 
42
@st.cache_resource
def vector_embedding():
    """Load PDFs from ./new, chunk them, and build a FAISS vector store.

    Cached by Streamlit across reruns so the embedding work happens once
    per process. Stops the app with an error message if the source
    directory is missing or contains no PDFs.

    Returns:
        FAISS: vector index over chunks of (at most) the first 20 documents.
    """
    # FIX: validate the input directory BEFORE constructing the loader;
    # the original built the loader first, which defeats the guard's point.
    if not os.path.exists("./new"):
        st.error("The directory './new' does not exist. Please provide the correct path.")
        st.stop()

    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    loader = PyPDFDirectoryLoader("./new")

    docs = loader.load()
    if not docs:
        st.error("No PDF files found in the './new' directory.")
        st.stop()

    # Overlapping chunks keep context across chunk boundaries.
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
    final_documents = text_splitter.split_documents(docs[:20])
    return FAISS.from_documents(final_documents, embeddings)


st.session_state.vectors = vector_embedding()
 
63
 
64
# Create the chat-history list on first run of the session.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Sidebar: one button per past question; clicking selects that Q/A pair.
with st.sidebar:
    st.title("Chat History")
    for i, entry in enumerate(st.session_state.chat_history):
        label = f"Q{i + 1}: {entry['question']}"
        if st.button(label):
            # Remember the clicked pair so it survives the rerun.
            st.session_state.selected_answer = entry['answer']
            st.session_state.selected_question = entry['question']

    # Echo whichever past Q/A pair is currently selected, if any.
    if 'selected_answer' not in st.session_state:
        st.write("No question selected from chat history yet.")
    else:
        st.write(f"**Q:** {st.session_state.selected_question}")
        st.write(f"**A:** {st.session_state.selected_answer}")

# Free-text box for a new question about the indexed documents.
prompt1 = st.text_input("Enter Your Question From Documents")
87
 
 
 
 
 
 
 
 
 
88
if prompt1:
    # Answer the question with a RAG chain over the FAISS retriever,
    # showing a spinner for the duration of the call.
    with st.spinner("Retrieving the best answer..."):
        retriever = st.session_state.vectors.as_retriever()
        document_chain = create_stuff_documents_chain(llm, prompt)
        retrieval_chain = create_retrieval_chain(retriever, document_chain)

        t0 = time.process_time()
        response = retrieval_chain.invoke({'input': prompt1})
        elapsed = time.process_time() - t0

        answer = response.get('answer', "No answer found.")
        st.success(f"Response Time: {elapsed:.2f} seconds")
        st.write(answer)

        # Record the exchange so the sidebar history can replay it.
        st.session_state.chat_history.append({"question": prompt1, "answer": answer})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
# Load environment variables
load_dotenv()

# Set page configuration (must be the first Streamlit call on the page)
st.set_page_config(page_title="Legal Assistant", layout="wide")

# Initialize session state for chat history if not exists
if 'chat_history' not in st.session_state:
    st.session_state.chat_history = []

# Title
st.title("Legal Assistant")

# Sidebar setup
st.sidebar.title("Chat History")

# API Key Configuration.
# BUG FIX: `os.environ[...] = os.getenv(...)` raises TypeError when the
# variable is unset (environ values must be str). Validate both keys and
# stop with a clear message instead of crashing on first use.
groq_api_key = os.getenv('groqapi')
google_api_key = os.getenv("GOOGLE_API_KEY")
if not groq_api_key or not google_api_key:
    st.error("API keys are missing. Please check your environment variables.")
    st.stop()
os.environ["GOOGLE_API_KEY"] = google_api_key

# LLM and Prompt Setup
llm = ChatGroq(groq_api_key=groq_api_key, model_name="Llama3-8b-8192")
prompt = ChatPromptTemplate.from_template(
    """
    Answer the questions based on the provided context only.
    Please provide the most accurate response based on the question
    <context>
    {context}
    <context>
    Questions:{input}
    """
)
46
 
 
47
def vector_embedding():
    """Build the FAISS vector store once and cache it in session state.

    Loads PDFs from ./new, splits the first 20 documents into overlapping
    chunks, embeds them with Google embeddings, and stores the index at
    ``st.session_state.vectors``. Intermediate artifacts are kept in
    session state as before so other parts of the app can inspect them.
    Stops the app with an error if the directory is missing or empty.
    """
    if "vectors" not in st.session_state:
        # FIX: guard against a missing source directory before touching
        # the loader — otherwise the failure surfaces as an opaque
        # exception from deep inside the PDF loader.
        if not os.path.isdir("./new"):
            st.error("The directory './new' does not exist. Please provide the correct path.")
            st.stop()

        st.session_state.embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
        st.session_state.loader = PyPDFDirectoryLoader("./new")  # Data Ingestion
        st.session_state.docs = st.session_state.loader.load()  # Document Loading

        # FIX: an empty directory previously produced an unusable empty
        # index; report it explicitly instead.
        if not st.session_state.docs:
            st.error("No PDF files found in the './new' directory.")
            st.stop()

        st.session_state.text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)  # Chunk Creation
        st.session_state.final_documents = st.session_state.text_splitter.split_documents(st.session_state.docs[:20])  # splitting
        st.session_state.vectors = FAISS.from_documents(st.session_state.final_documents, st.session_state.embeddings)


# Perform vector embedding
vector_embedding()

# Main input area
prompt1 = st.text_input("Enter Your Question From Documents")
62
 
63
# Function to add to chat history
def add_to_chat_history(question, answer):
    """Append one question/answer pair to the session-scoped chat history."""
    st.session_state.chat_history.append({
        'question': question,
        'answer': answer
    })


# Process question and generate response
if prompt1:
    try:
        # Create document and retrieval chains
        document_chain = create_stuff_documents_chain(llm, prompt)
        retriever = st.session_state.vectors.as_retriever()
        retrieval_chain = create_retrieval_chain(retriever, document_chain)

        # Generate response
        start = time.process_time()
        response = retrieval_chain.invoke({'input': prompt1})
        response_time = time.process_time() - start

        # FIX: response_time was computed but never used (dead store);
        # show it to the user as earlier revisions of this app did.
        st.write(f"Response Time: {response_time:.2f} seconds")

        # Display response
        st.write(response['answer'])

        # Add to chat history
        add_to_chat_history(prompt1, response['answer'])

    except Exception as e:
        # Broad catch is deliberate at this top-level UI boundary: any
        # retrieval/LLM failure is reported to the user, not raised.
        st.error(f"An error occurred: {e}")
91
+
92
# Sidebar content
# Clear chat history button
if st.sidebar.button("Clear Chat History"):
    st.session_state.chat_history = []

# Display chat history, most recent first; numbering is arranged so the
# newest entry carries the highest question number.
st.sidebar.write("### Previous Questions")
for idx, chat in enumerate(reversed(st.session_state.chat_history), 1):
    question_number = len(st.session_state.chat_history) - idx + 1
    # FIX: give each button an explicit unique key — Streamlit derives
    # widget ids from the label, so two identical questions would raise
    # a duplicate-widget-ID error without one.
    if st.sidebar.button(f"Question {question_number}: {chat['question']}", key=f"history_{idx}"):
        # Display the corresponding answer when the button is clicked
        st.sidebar.write(f"**Answer:** {chat['answer']}")