removed chathistory
app.py CHANGED
@@ -119,7 +119,7 @@ def rerank_with_bm25(docs, query):
 
 
 # ---------------------- History-Aware CAG ----------------------
-def retrieve_from_cag(user_query, chat_history):
+def retrieve_from_cag(user_query):
     query_embedding = semantic_model.encode(user_query, convert_to_tensor=True)
     cosine_scores = util.cos_sim(query_embedding, qa_embeddings)[0]
     best_idx = int(np.argmax(cosine_scores))
@@ -132,7 +132,7 @@ def retrieve_from_cag(user_query, chat_history):
     return None, best_score
 
 # ---------------------- History-Aware RAG ----------------------
-def retrieve_from_rag(user_query, chat_history):
+def retrieve_from_rag(user_query):
     # Combine history with current query
     #history_context = " ".join([f"User: {msg[0]} Bot: {msg[1]}" for msg in chat_history]) + " "
     #full_query = history_context + user_query
@@ -167,10 +167,7 @@ def generate_via_openrouter(context, query, chat_history=None):
     print("\n--- Generating via OpenRouter ---")
     print("Context received:", context)
 
-
-    if chat_history:
-        history_text = "\n".join([f"User: {q}\nBot: {a}" for q, a in chat_history[-2:]])  # Last 2 exchanges only
-
+
     prompt = f"""<s>[INST]
 You are a Moodle expert assistant.
 Instructions:
@@ -206,14 +203,14 @@ def chatbot(query, chat_history):
     print("User Query:", query)
 
     # Try to retrieve from CAG (cache)
-    answer, score = retrieve_from_cag(query, chat_history)
+    answer, score = retrieve_from_cag(query)
     if answer:
         print("Answer retrieved from CAG cache.")
-
+
         return answer
 
     # If not found, retrieve from RAG
-    docs = retrieve_from_rag(query, chat_history)
+    docs = retrieve_from_rag(query)
     if docs:
         context_blocks = []
         for doc in docs:
@@ -232,12 +229,12 @@ def chatbot(query, chat_history):
 
         # Choose the generation backend (OpenRouter)
         response = generate_via_openrouter(context, query)
-        chat_history.append((query, response))  # Append the new question-answer pair to history
+        # chat_history.append((query, response))  # Append the new question-answer pair to history
         return response
 
     else:
         print("No relevant documents found.")
-        chat_history.append((query, "Je ne sais pas."))
+        # chat_history.append((query, "Je ne sais pas."))
         return "Je ne sais pas."
 
 # ---------------------- Gradio App ----------------------
@@ -255,14 +252,14 @@ def save_chat_to_file(chat_history):
 
     return file_path
 
-def ask(user_message, chat_history):
-    if not user_message:
-        return chat_history, chat_history, ""
+#def ask(user_message, chat_history):
+#    if not user_message:
+#        return chat_history, chat_history, ""
 
-    response = chatbot(user_message, chat_history)
-    chat_history.append((user_message, response))
+#    response = chatbot(user_message, chat_history)
+#    chat_history.append((user_message, response))
 
-    return chat_history, chat_history, ""
+#    return chat_history, chat_history, ""
 
 # Initialize chat history with a welcome message
 initial_message = (None, "Hello, how can I help you with Moodle?")
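For reference, the history-free cache lookup that this commit settles on follows the encode / cos_sim / argmax pattern visible in retrieve_from_cag above. The following is a minimal sketch, not the repository's exact code: the model name, the qa_pairs contents, and the 0.8 threshold are assumptions, since the diff only shows the top of the function.

# Minimal sketch of the history-free CAG lookup pattern shown in the diff.
# Assumed: model name, qa_pairs contents, and the 0.8 threshold.
import numpy as np
from sentence_transformers import SentenceTransformer, util

semantic_model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed model
qa_pairs = [
    ("How do I reset a user's password in Moodle?",
     "Site administration > Users > Browse list of users, then edit the user and set a new password."),
]
qa_embeddings = semantic_model.encode([q for q, _ in qa_pairs], convert_to_tensor=True)

def retrieve_from_cag(user_query, threshold=0.8):
    # Embed the query and score it against every cached question
    query_embedding = semantic_model.encode(user_query, convert_to_tensor=True)
    cosine_scores = util.cos_sim(query_embedding, qa_embeddings)[0]
    best_idx = int(np.argmax(cosine_scores))
    best_score = float(cosine_scores[best_idx])
    # Serve from cache only when the match is confident; otherwise signal a miss
    if best_score >= threshold:
        return qa_pairs[best_idx][1], best_score
    return None, best_score

A hit returns the cached answer and its score so chatbot() can skip RAG entirely; a miss returns (None, best_score) and the pipeline falls through to retrieve_from_rag, as in the hunk at @@ -206,14 +203,14 @@.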