jamescg committed on
Commit
b277c5c
1 Parent(s): 65ab164

Update query_data.py

Browse files
Files changed (1) hide show
  1. query_data.py +26 -0
query_data.py CHANGED
@@ -5,3 +5,29 @@ from langchain.chains import ChatVectorDBChain
5
  _template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
6
  You can assume the question about the Deceleration of the cell cycle underpins a switch from proliferative to terminal divisions in plant stomatal lineage.
7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
# Prompt used to condense the chat history plus a follow-up question into a
# single standalone question (fed to the condense step of the chain).
_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
You can assume the question about the Deceleration of the cell cycle underpins a switch from proliferative to terminal divisions in plant stomatal lineage.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

# Prompt for the QA step: answers the standalone question from the retrieved
# document excerpts ({context}), constrained to the paper's topic.
template = """You are an AI assistant for answering questions about the Deceleration of the cell cycle.
You are given the following extracted parts of a long document and a question. Provide a conversational answer.
If you don't know the answer, just say "Hmm, I'm not sure." Don't try to make up an answer.
If the question is not about the Deceleration of the cell cycle, politely inform them that you are tuned to only answer questions about the Deceleration of the cell cycle.
Question: {question}
{context}
=========
Answer in Markdown:"""
QA_PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
25
def get_chain(vectorstore):
    """Build and return a ChatVectorDBChain over *vectorstore*.

    The chain condenses chat history with CONDENSE_QUESTION_PROMPT and answers
    with QA_PROMPT, using a deterministic (temperature=0) OpenAI LLM.
    """
    # temperature=0 keeps the assistant's answers reproducible.
    return ChatVectorDBChain.from_llm(
        OpenAI(temperature=0),
        vectorstore,
        qa_prompt=QA_PROMPT,
        condense_question_prompt=CONDENSE_QUESTION_PROMPT,
    )