lchakkei committed
Commit: c3714b0
Parent: 5eff215

Update handler.py

Files changed (1)
handler.py  +5 -4
handler.py CHANGED
@@ -87,19 +87,20 @@ class EndpointHandler():
         compressor = LLMChainExtractor.from_llm(chat)
         retriever = ContextualCompressionRetriever(base_compressor=compressor, base_retriever=retriever)
 
-        _template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
+        _template = """[INST] Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
 Chat History:
 {chat_history}
 Follow Up Input: {question}
-Standalone question:"""
+Standalone question: [/INST]"""
 
         CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
 
-        template = """Answer the question based only on the following context:
+        template = """[INST] Answer the question based only on the following context:
 {context}
 
-Question: {question}
+Question: {question} [/INST]
 """
+
         ANSWER_PROMPT = ChatPromptTemplate.from_template(template)
 
         self.memory = ConversationBufferMemory(
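
The only functional change in this hunk is wrapping both prompt templates in [INST] ... [/INST] markers, the instruction delimiters expected by Llama-2/Mistral-style instruct models (presumably the model served by this endpoint; the diff itself does not name it). A minimal sketch of what the updated condense-question prompt renders to; the template string is copied from the diff, while the chat-history and question values below are invented for illustration:

from langchain.prompts import PromptTemplate

_template = """[INST] Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question: [/INST]"""

CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

# Example values, purely illustrative; not taken from handler.py.
print(CONDENSE_QUESTION_PROMPT.format(
    chat_history="Human: Who wrote Dune?\nAI: Frank Herbert.",
    question="When was it published?",
))
# The rendered prompt starts with "[INST]" and ends with "[/INST]", so the
# instruct model receives the rephrasing task as a single instruction turn.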
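
For context, the objects touched in this hunk (the compression retriever, the two prompts, and the conversation buffer memory) are the usual ingredients of a LangChain conversational-retrieval pipeline. The chain construction itself sits outside this hunk, so the following LCEL wiring is a hypothetical sketch only: `chat` and `retriever` are assumed to be the chat model and ContextualCompressionRetriever created above, `format_docs` is an illustrative helper not shown in handler.py, and the imports assume a langchain release that ships langchain_core.

from operator import itemgetter
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

def format_docs(docs):
    # Illustrative helper: join the retrieved (and LLM-compressed) chunks
    # into a single context string for ANSWER_PROMPT.
    return "\n\n".join(doc.page_content for doc in docs)

# Hypothetical chain: condense the follow-up into a standalone question,
# retrieve compressed context for it, then answer from that context only.
chain = (
    RunnablePassthrough.assign(
        standalone_question=CONDENSE_QUESTION_PROMPT | chat | StrOutputParser()
    )
    | RunnablePassthrough.assign(
        context=itemgetter("standalone_question") | retriever | format_docs
    )
    | {
        "context": itemgetter("context"),
        "question": itemgetter("standalone_question"),
    }
    | ANSWER_PROMPT
    | chat
    | StrOutputParser()
)

# chat_history would typically be read from the buffer memory configured below,
# e.g. via self.memory.load_memory_variables({}) (the key depends on the
# memory_key chosen there), before calling:
# chain.invoke({"question": user_question, "chat_history": history_text})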