import gradio as gr
from langchain_community.document_loaders import PyPDFLoader, DirectoryLoader
from langchain.prompts import PromptTemplate
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_community.llms import CTransformers
from langchain.chains import RetrievalQA

DB_FAISS_PATH = 'vectorstore/db_faiss'
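
# The FAISS index at DB_FAISS_PATH must exist before the bot can answer.
# A minimal ingest sketch, assuming the source PDFs live in a hypothetical
# 'data/' directory; chunk_size/chunk_overlap are illustrative, not tuned.
def create_vector_db(data_path='data/'):
    # Local import: the splitter is only needed at ingest time.
    from langchain.text_splitter import RecursiveCharacterTextSplitter

    docs = DirectoryLoader(data_path, glob='*.pdf', loader_cls=PyPDFLoader).load()
    texts = RecursiveCharacterTextSplitter(chunk_size=500,
                                           chunk_overlap=50).split_documents(docs)
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
                                       model_kwargs={'device': 'cpu'})
    FAISS.from_documents(texts, embeddings).save_local(DB_FAISS_PATH)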

custom_prompt_template = """Use the following pieces of information to answer the user's question.
If you don't know the answer, just say that you don't know, don't try to make up an answer.

Context: {context}
Question: {question}

Only return the helpful answer below and nothing else.
Helpful answer:
"""

def set_custom_prompt():
    prompt = PromptTemplate(template=custom_prompt_template,
                            input_variables=['context', 'question'])
    return prompt

def load_llm():
    # Per the LangChain CTransformers docs, generation settings belong in a
    # `config` dict; passed as top-level kwargs they may be silently ignored.
    # The GGML repo ships several quantisations; pass model_file=... to pin a
    # specific weights file if auto-selection fails.
    llm = CTransformers(
        model="TheBloke/Llama-2-7B-Chat-GGML",
        model_type="llama",
        config={'max_new_tokens': 512, 'temperature': 0.5}
    )
    return llm

def qa_bot(query):
    # Note: the embeddings, index and LLM are reloaded on every query here;
    # cache them at module level if latency matters.
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
                                       model_kwargs={'device': 'cpu'})
    # Recent langchain_community releases require opting in to pickle
    # deserialization when loading a FAISS index; only load indexes you built.
    db = FAISS.load_local(DB_FAISS_PATH, embeddings,
                          allow_dangerous_deserialization=True)
    llm = load_llm()
    qa_prompt = set_custom_prompt()
    qa = RetrievalQA.from_chain_type(llm=llm,
                                     chain_type='stuff',
                                     retriever=db.as_retriever(search_kwargs={'k': 2}),
                                     return_source_documents=True,
                                     chain_type_kwargs={'prompt': qa_prompt})
    # invoke() replaces the deprecated direct-call interface on chains.
    response = qa.invoke({'query': query})
    return response['result'], response['source_documents']
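
# Quick smoke test without the UI (assumes the index has already been built):
#   answer, docs = qa_bot("What are the symptoms of hypertension?")
#   print(answer)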

def main(query):
    answer, sources = qa_bot(query)
    if sources:
        # Each source is a Document; report its file path and page rather
        # than dumping the raw objects into the answer.
        refs = "\n".join(f"- {doc.metadata.get('source', 'unknown')}, "
                         f"page {doc.metadata.get('page', '?')}" for doc in sources)
        answer += f"\n\nSources:\n{refs}"
    else:
        answer += "\nNo sources found"
    return answer

iface = gr.Interface(fn=main,
                     # gr.inputs/gr.outputs were removed in Gradio 4; use the
                     # top-level components instead.
                     inputs=gr.Textbox(label="Enter your medical query"),
                     outputs=gr.Textbox(label="Answer"),
                     title="Medical Bot",
                     description="Ask any medical query and get an answer with sources if available.")
iface.launch()
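# launch() takes the usual Gradio options if the app must be reachable from
# other machines, e.g. iface.launch(server_name="0.0.0.0", server_port=7860).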