Gaurav-2273 committed on
Commit
ac31900
1 Parent(s): 505e588

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +106 -2
app.py CHANGED
@@ -95,6 +95,103 @@
95
 
96
 
97
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
98
  import gradio as gr
99
  import json
100
  from typing import List, Dict
@@ -178,11 +275,18 @@ def answer_query(question: str):
178
  chat_history.append((question, result['answer']))
179
  return chat_history
180
 
 
 
 
 
 
 
 
181
  with gr.Blocks() as demo:
182
  # Reset chat history and memory on initialization
183
- chat_history = []
184
  initialize_chatbot_from_json("embeddings.json", openai_api_key)
185
-
186
  chatbot = gr.Chatbot(label="Chatbot")
187
  question = gr.Textbox(label="Ask a question", placeholder="Type your question...")
188
  question.submit(answer_query, inputs=[question], outputs=[chatbot])
 
95
 
96
 
97
 
98
+ # import gradio as gr
99
+ # import json
100
+ # from typing import List, Dict
101
+ # from langchain_openai.embeddings import OpenAIEmbeddings
102
+ # from langchain_chroma import Chroma
103
+ # from langchain.retrievers.multi_query import MultiQueryRetriever
104
+ # from langchain.chains import ConversationalRetrievalChain
105
+ # from langchain.memory import ConversationBufferMemory
106
+ # from langchain_openai import ChatOpenAI
107
+ # from langchain.schema import Document
108
+ # from langchain.chains import LLMChain
109
+ # from langchain.chains.question_answering import load_qa_chain
110
+ # from langchain.prompts import PromptTemplate
111
+ # import os
112
+
113
+ # openai_api_key = os.getenv("OPENAI_API_KEY")
114
+
115
+ # vectorstore = None
116
+ # llm = None
117
+ # qa_instance = None
118
+ # chat_history = []
119
+
120
+ # def load_embeddings_from_json(json_file_path: str):
121
+ # with open(json_file_path, 'r') as f:
122
+ # data = json.load(f)
123
+ # chunks = [item['chunk'] for item in data]
124
+ # embeddings = [item['embeddings'] for item in data]
125
+ # ids = [item.get('id', str(index)) for index, item in enumerate(data)]
126
+ # return chunks, embeddings, ids
127
+
128
+ # def initialize_chatbot_from_json(json_file_path: str, openai_api_key: str):
129
+ # global vectorstore, llm, qa_instance
130
+ # if vectorstore is None:
131
+ # chunks, embeddings, ids = load_embeddings_from_json(json_file_path)
132
+ # vectorstore = Chroma(
133
+ # collection_name="my_collection",
134
+ # persist_directory=None,
135
+ # embedding_function=OpenAIEmbeddings(api_key=openai_api_key)
136
+ # )
137
+ # vectorstore._client._add(
138
+ # collection_id=vectorstore._collection.id,
139
+ # ids=ids,
140
+ # embeddings=embeddings,
141
+ # metadatas=[{"source": "json"} for _ in chunks],
142
+ # documents=chunks,
143
+ # )
144
+ # if llm is None:
145
+ # llm = ChatOpenAI(api_key=openai_api_key, temperature=0.5, model="gpt-4o", verbose=True)
146
+ # retriever = MultiQueryRetriever.from_llm(retriever=vectorstore.as_retriever(), llm=llm)
147
+ # memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
148
+ # _template = """Given the following conversation and a follow up question, rephrase the follow up question to be a
149
+ # standalone question without changing the content in given question.
150
+ # Chat History:
151
+ # {chat_history}
152
+ # Follow Up Input: {question}
153
+ # Standalone question:"""
154
+ # condense_question_prompt_template = PromptTemplate.from_template(_template)
155
+ # prompt_template = """You are a highly informative and helpful QA System specialized in providing information related to the UPSC Exam but strictly within the 'Context'. Ensure you only answer questions that are relevant to the UPSC Exam. If the question asked is not in 'Context' and not related to the UPSC Exam, do not provide an answer. Always answer in an informative and highly detailed manner, oriented towards the UPSC Exam. Also never just answer the Query, Never tell anything about 'Context'. Dont use unnecessary lines!
156
+ # Context:
157
+ # {context}
158
+ # Question: {question}
159
+ # Helpful Answer:"""
160
+ # qa_prompt = PromptTemplate(
161
+ # template=prompt_template, input_variables=["context", "question"]
162
+ # )
163
+ # question_generator = LLMChain(llm=llm, prompt=condense_question_prompt_template, memory=memory)
164
+ # doc_chain = load_qa_chain(llm, chain_type="stuff", prompt=qa_prompt)
165
+ # qa_instance = ConversationalRetrievalChain(
166
+ # retriever=retriever,
167
+ # question_generator=question_generator,
168
+ # combine_docs_chain=doc_chain,
169
+ # memory=memory)
170
+
171
+ # def answer_query(question: str):
172
+ # global chat_history
173
+ # if qa_instance is None:
174
+ # return [("Please initialize the system first.", "")]
175
+ # if not question.strip():
176
+ # return [("Please enter a question.", "")]
177
+ # result = qa_instance({"question": question})
178
+ # chat_history.append((question, result['answer']))
179
+ # return chat_history
180
+
181
+ # with gr.Blocks() as demo:
182
+ # # Reset chat history and memory on initialization
183
+ # chat_history = []
184
+ # initialize_chatbot_from_json("embeddings.json", openai_api_key)
185
+
186
+ # chatbot = gr.Chatbot(label="Chatbot")
187
+ # question = gr.Textbox(label="Ask a question", placeholder="Type your question...")
188
+ # question.submit(answer_query, inputs=[question], outputs=[chatbot])
189
+
190
+ # if __name__ == "__main__":
191
+ # demo.launch()
192
+
193
+
194
+
195
  import gradio as gr
196
  import json
197
  from typing import List, Dict
 
275
  chat_history.append((question, result['answer']))
276
  return chat_history
277
 
278
+ def reset_chatbot():
279
+ global chat_history, qa_instance
280
+ chat_history = []
281
+ # Reinitialize the memory for qa_instance
282
+ memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
283
+ qa_instance.memory = memory
284
+
285
  with gr.Blocks() as demo:
286
  # Reset chat history and memory on initialization
287
+ reset_chatbot()
288
  initialize_chatbot_from_json("embeddings.json", openai_api_key)
289
+
290
  chatbot = gr.Chatbot(label="Chatbot")
291
  question = gr.Textbox(label="Ask a question", placeholder="Type your question...")
292
  question.submit(answer_query, inputs=[question], outputs=[chatbot])