# Medical_Chatbot/streamlit.py
import streamlit as st
from langchain.vectorstores import FAISS
from langchain.prompts import PromptTemplate
from langchain.llms import CTransformers
from langchain.chains import RetrievalQA
from src.prompt import *  # provides prompt_template
from store_index import embeddings  # embeddings used when the FAISS index was built
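
# Load the persisted FAISS index from disk, reusing the embeddings from store_index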
vector_db = FAISS.load_local("Faiss_db", embeddings)
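
# Prompt that injects the retrieved context and the user's question into the LLM call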
PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
chain_type_kwargs = {"prompt": PROMPT}
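
# Load the quantized LLaMA-2 7B chat model (GGML) locally via CTransformers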
llm = CTransformers(
    model="model/llama-2-7b-chat.ggmlv3.q4_0.bin",
    model_type="llama",
    config={'max_new_tokens': 512, 'temperature': 0.8},
)
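
# Retrieval-QA chain: "stuff" the top-2 retrieved chunks into the prompt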
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vector_db.as_retriever(search_kwargs={'k': 2}),
    return_source_documents=True,
    chain_type_kwargs=chain_type_kwargs,
)

# Streamlit UI
st.set_page_config(page_title="Chatbot App", page_icon=":robot:", layout="centered")
st.title("Medical Chatbot with LLama2")
msg = st.text_input("Enter your message:")
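
# On "Send", run the retrieval-QA chain and display the answer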
if st.button("Send"):
    result = qa({"query": msg})
    st.write("Response:", result["result"])
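    # Optional: since return_source_documents=True, the chain also returns the
    # retrieved chunks; uncomment to show the supporting passages below the answer.
    # for doc in result["source_documents"]:
    #     st.write(doc.page_content)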