Spaces:
Running
Running
app16
Browse files
app.py
CHANGED
@@ -66,11 +66,10 @@ vectordb = Chroma.from_documents(
|
|
66 |
# define retriever
|
67 |
retriever = vectordb.as_retriever(search_type="mmr")
|
68 |
template = """Your name is AngryGreta and you are a recycling chatbot with the objective and purpose to help people with waste management to improve the environmental situation. Use the following pieces of context to answer the question if the question is related to recycling. Answer in the same language as the question. Keep the answer as concise as possible. Always say "thanks for asking!" at the end of the answer.
|
69 |
-
|
70 |
-
|
71 |
-
{
|
72 |
-
|
73 |
-
Helpful Answer:"""
|
74 |
|
75 |
# Create the chat prompt templates
|
76 |
system_prompt = SystemMessagePromptTemplate.from_template(template)
|
@@ -99,21 +98,21 @@ qa_chain = ConversationalRetrievalChain.from_llm(
|
|
99 |
llm = llm,
|
100 |
memory = memory,
|
101 |
retriever = retriever,
|
102 |
-
verbose =
|
103 |
combine_docs_chain_kwargs={'prompt': qa_prompt},
|
104 |
get_chat_history = lambda h : h
|
105 |
)
|
106 |
|
107 |
def chat_interface(question):
|
108 |
|
109 |
-
|
110 |
print("Debug: Result from qa_chain.run:", result)
|
111 |
|
112 |
# Check the structure of the result
|
113 |
-
if isinstance(
|
114 |
-
return
|
115 |
else:
|
116 |
-
return "Unexpected
|
117 |
|
118 |
chatbot_gradio_app = gr.Interface(
|
119 |
fn=chat_interface,
|
|
|
66 |
# define retriever
|
67 |
retriever = vectordb.as_retriever(search_type="mmr")
|
68 |
template = """Your name is AngryGreta and you are a recycling chatbot with the objective and purpose to help people with waste management to improve the environmental situation. Use the following pieces of context to answer the question if the question is related to recycling. Answer in the same language as the question. Keep the answer as concise as possible. Always say "thanks for asking!" at the end of the answer.
|
69 |
+
context: {context}
|
70 |
+
chat history: {chat_history}
|
71 |
+
question: {question}
|
72 |
+
"""
|
|
|
73 |
|
74 |
# Create the chat prompt templates
|
75 |
system_prompt = SystemMessagePromptTemplate.from_template(template)
|
|
|
98 |
llm = llm,
|
99 |
memory = memory,
|
100 |
retriever = retriever,
|
101 |
+
verbose = True,
|
102 |
combine_docs_chain_kwargs={'prompt': qa_prompt},
|
103 |
get_chat_history = lambda h : h
|
104 |
)
|
105 |
|
106 |
def chat_interface(question):
|
107 |
|
108 |
+
answer = qa_chain.run({"question": question})
|
109 |
print("Debug: Result from qa_chain.run:", result)
|
110 |
|
111 |
# Check the structure of the result
|
112 |
+
if isinstance(answer, str):
|
113 |
+
return answer # If the result is a string, return it directly
|
114 |
else:
|
115 |
+
return "Unexpected answer format"
|
116 |
|
117 |
chatbot_gradio_app = gr.Interface(
|
118 |
fn=chat_interface,
|