Manglik-R committed
Commit 938d0d4
1 Parent(s): 2004747

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -33,7 +33,7 @@ def pdf_changes(pdf_doc):
 
     llm = Replicate(
         model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
-        input={"temperature": 0.2, "max_length": 3000, "length_penalty":0.5, "num_beams":3}
+        input={"temperature": 0.4, "max_length": 3000, "length_penalty":0.1, "num_beams":3}
     )
     global qa
     qa = ConversationalRetrievalChain.from_llm(
@@ -43,7 +43,7 @@ def pdf_changes(pdf_doc):
     )
     return "Ready"
 
-def text(history, text):
+def query(history, text):
     result = qa({'question': text, 'chat_history': history})
     history.append((text, result['answer']))
 
@@ -70,8 +70,8 @@ with gr.Blocks(css=css) as demo:
     question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ")
     submit_btn = gr.Button("Send message")
     load_pdf.click(pdf_changes, inputs=[pdf_doc], outputs=[langchain_status], queue=False)
-    question.submit(text, [chatbot, question], [chatbot, question])
-    submit_btn.click(text, [chatbot, question], [chatbot, question])
+    question.submit(query, [chatbot, question], [chatbot, question])
+    submit_btn.click(query, [chatbot, question], [chatbot, question])
 
 demo.launch()
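
For context, the hunks only show the first two lines of the renamed handler's body. Below is a minimal sketch of how the full query callback and its event wiring plausibly look after this commit; the final return history, "" (refreshing the gr.Chatbot and clearing the question textbox) is an assumption inferred from the [chatbot, question] outputs in the wiring, not something shown in the diff.

    # Sketch only: the diff shows just the first two lines of this function.
    def query(history, text):
        # qa is the ConversationalRetrievalChain built in pdf_changes()
        result = qa({'question': text, 'chat_history': history})
        # append the (user message, model answer) pair to the chat history
        history.append((text, result['answer']))
        # assumption: returned values map onto the [chatbot, question] outputs,
        # so the chatbot shows the updated history and the textbox is cleared
        return history, ""

    # Event wiring as it reads after the rename (context lines from the diff):
    question.submit(query, [chatbot, question], [chatbot, question])
    submit_btn.click(query, [chatbot, question], [chatbot, question])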