Update functions.py
functions.py (+4 -4)
@@ -189,9 +189,9 @@ def embed_text(query,corpus,title,embedding_model,_emb_tok,chain_type='stuff'):
             prompt=PROMPT,
         )
 
-        answer = chain({"input_documents": docs, "question": query}, return_only_outputs=
+        answer = chain({"input_documents": docs, "question": query}, return_only_outputs=False)
 
-        return answer
+        return answer
 
     elif chain_type == 'Refined':
 
@@ -200,9 +200,9 @@ def embed_text(query,corpus,title,embedding_model,_emb_tok,chain_type='stuff'):
         )
         chain = load_qa_chain(OpenAI(temperature=0), chain_type="refine", return_refine_steps=False,
                               question_prompt=initial_qa_prompt, refine_prompt=refine_prompt)
-        answer = chain({"input_documents": docs, "question": query}, return_only_outputs=
+        answer = chain({"input_documents": docs, "question": query}, return_only_outputs=False)
 
-        return answer
+        return answer
 
 @st.experimental_singleton(suppress_st_warning=True)
 def get_spacy():
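For context, a minimal sketch of how a QA chain like the one touched by this commit is typically invoked. It assumes a legacy LangChain release where load_qa_chain and the OpenAI wrapper live at these import paths; the docs and query values are placeholders, not code from this repository.

```python
# Minimal sketch, assuming a legacy LangChain release (pre-0.1) where
# load_qa_chain and the OpenAI LLM wrapper still live at these paths.
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.docstore.document import Document

# Placeholder inputs; in functions.py these come from the embedded corpus.
docs = [Document(page_content="Streamlit is an open-source Python app framework.")]
query = "What is Streamlit?"

# "stuff" chain type: every document is stuffed into a single prompt.
chain = load_qa_chain(OpenAI(temperature=0), chain_type="stuff")

# return_only_outputs=False (the value this commit settles on) keeps the
# inputs in the result dict alongside the generated text.
answer = chain({"input_documents": docs, "question": query},
               return_only_outputs=False)
print(answer["output_text"])
```

The 'Refined' branch in the diff differs only in that load_qa_chain is given chain_type="refine" plus question_prompt and refine_prompt templates, so the answer is built up document by document instead of in a single stuffed prompt.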