Spaces:
Sleeping
Sleeping
added a prompt template
Browse files
app.py
CHANGED
@@ -7,6 +7,7 @@ from langchain.chains import ConversationalRetrievalChain
|
|
7 |
from langchain.chat_models import ChatOpenAI
|
8 |
from langchain.memory import ConversationBufferMemory
|
9 |
from langchain.vectorstores import Pinecone
|
|
|
10 |
|
11 |
|
12 |
BOOK_TOKEN = os.getenv("book")
|
@@ -56,9 +57,18 @@ with gr.Blocks() as demo:
|
|
56 |
memory_key='chat_history',
|
57 |
return_messages=False
|
58 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
59 |
|
60 |
#Initialize langchain - Conversation Retrieval Chain
|
61 |
-
qa = ConversationalRetrievalChain.from_llm(ChatOpenAI(temperature=0), retriever=db.as_retriever(), memory=memory)
|
62 |
|
63 |
|
64 |
|
|
|
7 |
from langchain.chat_models import ChatOpenAI
|
8 |
from langchain.memory import ConversationBufferMemory
|
9 |
from langchain.vectorstores import Pinecone
|
10 |
+
from langchain.prompts.prompt import PromptTemplate
|
11 |
|
12 |
|
13 |
BOOK_TOKEN = os.getenv("book")
|
|
|
57 |
memory_key='chat_history',
|
58 |
return_messages=False
|
59 |
)
|
60 |
+
|
61 |
+
#PUT IT IN A PROMPT TEMPLATE - this prompt is used by the combine-docs step, so it must use the {context} and {question} variables that ConversationalRetrievalChain supplies
|
62 |
+
template = """The following is a chat between a human and an AI assistant. The AI provides the answer along with the section it referred to for the answer.
|
63 |
+
Context:
|
64 |
+
{context}
|
65 |
+
Question: {question}
|
66 |
+
AI:
|
67 |
+
"""
|
68 |
+
PROMPT = PromptTemplate(input_variables=["context", "question"], template=template)
|
69 |
|
70 |
#Initialize langchain - Conversation Retrieval Chain
|
71 |
+
qa = ConversationalRetrievalChain.from_llm(ChatOpenAI(temperature=0), retriever=db.as_retriever(), memory=memory, combine_docs_chain_kwargs={"prompt": PROMPT})
|
72 |
|
73 |
|
74 |
|