# chatbot/app.py
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer
# Set Streamlit page config
st.set_page_config(page_title="ChatDoctor", page_icon="🩺")
# Title
st.title("🩺 ChatDoctor - Medical Assistant")
# Load model and tokenizer
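# st.cache_resource caches the returned model/tokenizer across Streamlit reruns,
# so the weights are downloaded and loaded only once per server process.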
@st.cache_resource
def load_model():
    # Load the model and tokenizer on the CPU, without GPU (CUDA)
    # optimizations such as quantization
    model = AutoModelForCausalLM.from_pretrained("abhiyanta/chatDoctor", use_cache=True)
    tokenizer = AutoTokenizer.from_pretrained("abhiyanta/chatDoctor")
    return model, tokenizer
model, tokenizer = load_model()
# Alpaca-style prompt template
alpaca_prompt = "### Instruction:\n{0}\n\n### Input:\n{1}\n\n### Output:\n{2}"
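# The user's question fills the Instruction slot; Input and Output are left
# empty so the model continues generating after the "### Output:" header.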
# Text input for the user
user_input = st.text_input("Ask your medical question:")
# Button to trigger response
if st.button("Ask ChatDoctor"):
    if user_input:
        # Fill the Alpaca template; Input and Output stay empty
        formatted_prompt = alpaca_prompt.format(user_input, "", "")

        # Tokenize and keep the tensors on the CPU
        inputs = tokenizer([formatted_prompt], return_tensors="pt").to("cpu")

        st.write("**ChatDoctor:**")

        # TextStreamer prints tokens to the server console as they are generated
        text_streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
        with st.spinner('Generating response...'):
            generated_ids = model.generate(**inputs, streamer=text_streamer, max_new_tokens=1000)

        # Decode only the newly generated tokens (everything after the prompt)
        # and show the answer in the Streamlit UI
        response = tokenizer.decode(
            generated_ids[0][inputs["input_ids"].shape[1]:],
            skip_special_tokens=True,
        )
        st.write(response)
    else:
        st.warning("Please enter a question to ask ChatDoctor.")
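# Note: TextStreamer streams tokens to the server console only. For live
# token-by-token output in the browser, one option (not used here) would be
# transformers.TextIteratorStreamer combined with st.write_stream, with
# model.generate running in a background thread.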
# Footer
st.markdown("---")
st.caption("Powered by Hugging Face 🤗")