Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -60,16 +60,16 @@ with gr.Blocks() as demo:
|
|
60 |
)
|
61 |
|
62 |
#PUT IT IN A PROMPT TEMPLATE
|
63 |
-
template = """The following is chat between a human and an AI assistant. The AI provides the answer along with the section it referred to for the answer.
|
64 |
-
Current Conversation:
|
65 |
-
{history}
|
66 |
-
Friend: {input}
|
67 |
-
AI:
|
68 |
-
"""
|
69 |
-
PROMPT = PromptTemplate(input_variables=["history", "input"], template=template)
|
70 |
|
71 |
#Initialize langchain - Conversation Retrieval Chain
|
72 |
-
qa = ConversationalRetrievalChain.from_llm(ChatOpenAI(temperature=0), retriever=db.as_retriever(), memory=memory
|
73 |
|
74 |
|
75 |
|
|
|
60 |
)
|
61 |
|
62 |
#PUT IT IN A PROMPT TEMPLATE
|
63 |
+
#template = """The following is chat between a human and an AI assistant. The AI provides the answer along with the section it referred to for the answer.
|
64 |
+
#Current Conversation:
|
65 |
+
#{history}
|
66 |
+
#Friend: {input}
|
67 |
+
#AI:
|
68 |
+
#"""
|
69 |
+
#PROMPT = PromptTemplate(input_variables=["history", "input"], template=template)
|
70 |
|
71 |
#Initialize langchain - Conversation Retrieval Chain
|
72 |
+
qa = ConversationalRetrievalChain.from_llm(ChatOpenAI(temperature=0), retriever=db.as_retriever(), memory=memory)
|
73 |
|
74 |
|
75 |
|