Update app.py
Browse files
app.py
CHANGED
@@ -39,11 +39,12 @@ if pdf_obj:
|
|
39 |
user_question = st.text_input("Haz una pregunta sobre tu PDF:")
|
40 |
|
41 |
if user_question:
|
42 |
-
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "<value truncated in extraction; likely a hardcoded token removed by this commit>"
|
43 |
docs = knowledge_base.similarity_search(user_question, 3)
|
44 |
# llm = ChatOpenAI(model_name='gpt-3.5-turbo')
|
45 |
-
llm = HuggingFaceHub(repo_id="lmsys/vicuna-7b-v1.1", model_kwargs={"temperature":0.5, "max_length":512})
|
|
|
46 |
chain = load_qa_chain(llm, chain_type="stuff")
|
47 |
respuesta = chain.run(input_documents=docs, question=user_question)
|
48 |
|
49 |
-
st.write(respuesta)
|
|
|
39 |
user_question = st.text_input("Haz una pregunta sobre tu PDF:")
|
40 |
|
41 |
if user_question:
|
42 |
+
os.environ["HUGGINGFACEHUB_API_TOKEN"] = ""
|
43 |
docs = knowledge_base.similarity_search(user_question, 3)
|
44 |
# llm = ChatOpenAI(model_name='gpt-3.5-turbo')
|
45 |
+
# llm = HuggingFaceHub(repo_id="lmsys/vicuna-7b-v1.1", model_kwargs={"temperature":0.5, "max_length":512})
|
46 |
+
llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
|
47 |
chain = load_qa_chain(llm, chain_type="stuff")
|
48 |
respuesta = chain.run(input_documents=docs, question=user_question)
|
49 |
|
50 |
+
st.write(respuesta)
|