Jatinydv commited on
Commit
88f03bb
1 Parent(s): a43049b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -14
app.py CHANGED
@@ -19,11 +19,25 @@ Helpful answer:
19
  """
20
 
21
def set_custom_prompt():
    """Build the prompt template used for QA retrieval.

    Wraps the module-level ``custom_prompt_template`` string (defined
    above) in a PromptTemplate that is filled with the retrieved
    ``context`` and the user's ``question``.
    """
    prompt = PromptTemplate(template=custom_prompt_template,
                            input_variables=['context', 'question'])
    return prompt
25
 
 
 
 
 
 
 
 
 
 
 
26
  def load_llm():
 
27
  llm = CTransformers(
28
  model="TheBloke/Llama-2-7B-Chat-GGML",
29
  model_type="llama",
@@ -32,33 +46,32 @@ def load_llm():
32
  )
33
  return llm
34
 
 
35
def qa_bot(query):
    """Answer *query* against the local FAISS vector store.

    Loads the sentence-transformer embeddings, the FAISS index at
    DB_FAISS_PATH and the Llama-2 LLM, assembles a RetrievalQA chain,
    runs the query and returns ``(answer_text, source_documents)``.

    NOTE(review): every call reloads the embeddings, index and LLM —
    expensive; consider caching them across calls.
    """
    # CPU-only embeddings — presumably the same model that built the
    # index; must match it for retrieval to make sense (confirm).
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
                                       model_kwargs={'device': 'cpu'})
    db = FAISS.load_local(DB_FAISS_PATH, embeddings)
    llm = load_llm()
    qa_prompt = set_custom_prompt()
    # 'stuff' chain: the top-2 retrieved chunks are stuffed directly
    # into the prompt alongside the question.
    qa = RetrievalQA.from_chain_type(llm=llm,
                                     chain_type='stuff',
                                     retriever=db.as_retriever(search_kwargs={'k': 2}),
                                     return_source_documents=True,
                                     chain_type_kwargs={'prompt': qa_prompt}
                                     )
    response = qa({'query': query})
    return response['result'], response['source_documents']
49
-
50
def main(query):
    """Gradio entry point: run qa_bot and append sources to the answer.

    Returns a single display string; when retrieval produced no source
    documents, a "No sources found" note is appended instead.
    """
    answer, sources = qa_bot(query)
    if sources:
        answer += f"\nSources: {sources}"
    else:
        answer += "\nNo sources found"
    return answer
57
 
58
# Gradio UI: one query textbox in, one answer textbox out.
# Fix: gr.inputs.Textbox / gr.outputs.Textbox were removed in Gradio 3.x —
# the top-level gr.Textbox component replaces both. Launch is guarded so
# importing this module does not start a server.
iface = gr.Interface(fn=main,
                     inputs=gr.Textbox(label="Enter your medical query"),
                     outputs=gr.Textbox(label="Answer"),
                     title="Medical Bot",
                     description="Ask any medical query and get an answer with sources if available.")

if __name__ == "__main__":
    iface.launch()
 
 
 
64
 
 
19
  """
20
 
21
def set_custom_prompt():
    """
    Prompt template for QA retrieval for each vectorstore
    """
    # Fill the module-level template with the retrieved context and the
    # user's question at query time.
    return PromptTemplate(
        template=custom_prompt_template,
        input_variables=['context', 'question'],
    )
28
 
29
# Retrieval QA Chain
def retrieval_qa_chain(llm, prompt, db):
    """Wire an LLM, a prompt and a FAISS store into a RetrievalQA chain."""
    # Fetch the two most similar chunks and stuff them into the prompt.
    retriever = db.as_retriever(search_kwargs={'k': 2})
    return RetrievalQA.from_chain_type(
        llm=llm,
        chain_type='stuff',
        retriever=retriever,
        return_source_documents=True,
        chain_type_kwargs={'prompt': prompt},
    )
37
+
38
+ # Loading the model
39
  def load_llm():
40
+ # Load the locally downloaded model here
41
  llm = CTransformers(
42
  model="TheBloke/Llama-2-7B-Chat-GGML",
43
  model_type="llama",
 
46
  )
47
  return llm
48
 
49
# QA Model Function
_qa_chain = None  # built lazily on first query, then reused


def _build_qa_chain():
    """Assemble the RetrievalQA chain from its expensive components.

    Loads the CPU sentence-transformer embeddings, the FAISS index at
    DB_FAISS_PATH and the Llama-2 LLM, then wires them together via
    retrieval_qa_chain(). All three loads are slow (the LLM alone takes
    seconds), which is why the result is cached in ``_qa_chain``.
    """
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
                                       model_kwargs={'device': 'cpu'})
    db = FAISS.load_local(DB_FAISS_PATH, embeddings)
    llm = load_llm()
    qa_prompt = set_custom_prompt()
    return retrieval_qa_chain(llm, qa_prompt, db)


def qa_bot(query):
    """Answer *query* from the local FAISS knowledge base.

    Returns the model's answer as a single string, with the retrieved
    source documents appended (or a "No sources found" note when
    retrieval came back empty).
    """
    global _qa_chain
    # Fix: the chain (embeddings + FAISS index + LLM) was rebuilt on
    # every call, adding seconds of load time per query. Build once.
    if _qa_chain is None:
        _qa_chain = _build_qa_chain()
    response = _qa_chain({'query': query})
    answer = response['result']
    sources = response['source_documents']
    if sources:
        answer += f"\nSources: {sources}"
    else:
        answer += "\nNo sources found"
    return answer
66
 
67
# Gradio front-end: a single query box in, a single answer box out.
_query_box = gr.Textbox(label="Enter your medical query")
_answer_box = gr.Textbox(label="Answer")

iface = gr.Interface(
    fn=qa_bot,
    inputs=_query_box,
    outputs=_answer_box,
    title="Medical Bot",
    description="Ask any medical query and get an answer with sources if available.",
)

# Only start the server when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()