Spaces: Running on Zero
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
import torch
import spaces
import os

# Environment flags: detect ZeroGPU Spaces, regular Spaces, and an optional low-memory mode
IS_SPACES_ZERO = os.environ.get("SPACES_ZERO_GPU", "0") == "1"
IS_SPACE = os.environ.get("SPACE_ID", None) is not None
device = "cuda" if torch.cuda.is_available() else "cpu"
LOW_MEMORY = os.getenv("LOW_MEMORY", "0") == "1"
print(f"Using device: {device}")
print(f"Low memory: {LOW_MEMORY}")

model_name = "ruslanmv/Medical-Llama3-8B"
# Load the tokenizer and move the model to the selected device
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
tokenizer.pad_token = tokenizer.eos_token
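
# Hedged sketch (assumption, not in the original app): BitsAndBytesConfig is imported
# and LOW_MEMORY is read above, but neither is used. If 4-bit loading was the intent,
# the low-memory path could be wired up like this; in a real app this block would
# replace the full-precision .to(device) load above rather than follow it.
if LOW_MEMORY:
    bnb_config = BitsAndBytesConfig(
        load_in_4bit=True,                      # store weights in 4-bit NF4
        bnb_4bit_quant_type="nf4",
        bnb_4bit_compute_dtype=torch.float16,   # compute in fp16
    )
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        quantization_config=bnb_config,
        device_map="auto",                      # let accelerate place the quantized weights
    )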
@spaces.GPU  # ZeroGPU: the GPU is attached only while this function runs
def askme(symptoms, question):
    sys_message = '''\
    You are an AI Medical Assistant trained on a vast dataset of health information. Please be thorough and
    provide an informative answer. If you don't know the answer to a specific medical inquiry, advise seeking professional help.
    '''
    content = symptoms + " " + question
    messages = [{"role": "system", "content": sys_message}, {"role": "user", "content": content}]
    prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer(prompt, return_tensors="pt").to(device)  # keep the inputs on the same device as the model
    outputs = model.generate(**inputs, max_new_tokens=200, use_cache=True)
    response_text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0].strip()
    # Extract only the assistant's turn from the decoded output, dropping the prompt
    start_idx = response_text.find("<|im_start|>assistant")
    end_idx = response_text.find("<|im_end|>", start_idx)
    assistant_response = response_text[start_idx + len("<|im_start|>assistant"):end_idx]
    return assistant_response.split(". ")[0] + "."  # return only the first sentence of the answer
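
# Hedged note (assumption, not in the original code): if generation regularly needs
# more than ZeroGPU's default time slot, the decorator accepts a duration hint:
#
#     @spaces.GPU(duration=120)
#     def askme(symptoms, question):
#         ...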
# Example usage
symptoms = '''\
I'm a 35-year-old male and for the past few months, I've been experiencing fatigue,
increased sensitivity to cold, and dry, itchy skin.
'''
question = '''\
Could these symptoms be related to hypothyroidism?
If so, what steps should I take to get a proper diagnosis and discuss treatment options?
'''
examples = [
    [symptoms, question]
]
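
# Hedged sketch (not part of the original app): a quick local smoke test that calls
# askme() directly with the example inputs. RUN_SMOKE_TEST is a made-up variable;
# the guard keeps the call from grabbing a ZeroGPU slot at startup on Spaces.
if not IS_SPACE and os.getenv("RUN_SMOKE_TEST", "0") == "1":
    print(askme(symptoms, question))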
css = """
/* General Container Styles */
.gradio-container {
    font-family: "IBM Plex Sans", sans-serif;
    position: fixed; /* Ensure full-screen coverage */
    top: 0;
    left: 0;
    width: 100vw; /* Set width to 100% viewport width */
    height: 100vh; /* Set height to 100% viewport height */
    margin: 0; /* Remove margins for full-screen effect */
    padding: 0; /* Remove padding for full-screen background */
    background-color: #212529; /* Dark background color */
    color: #fff; /* Light text color for better readability */
    overflow: hidden; /* Hide potential overflow content */
    background-image: url("https://huggingface.co/spaces/ruslanmv/AI-Medical-Chatbot/resolve/main/notebook/local/img/background.jpg"); /* Replace with your image path */
    background-size: cover; /* Stretch the image to cover the container */
    background-position: center; /* Center the image horizontally and vertically */
}
/* Button Styles */
.gr-button {
    color: white;
    background: #007bff; /* Use a primary color for the background */
    white-space: nowrap;
    border: none;
    padding: 10px 20px;
    border-radius: 8px;
    cursor: pointer;
    transition: background-color 0.3s, color 0.3s;
}
.gr-button:hover {
    background-color: #0056b3; /* Darken the background color on hover */
}
/* Output box styles */
.gradio-textbox {
    background-color: #343a40; /* Dark background color */
    color: #fff; /* Light text color for better readability */
    border-color: #343a40; /* Dark border color */
    border-radius: 8px;
}
"""
welcome_message = """# AI Medical Llama 3 Chatbot
Ask any medical question, describing your symptoms first, and get answers from our AI Medical Llama 3 Chatbot.
Developed by Ruslan Magana. Visit [https://ruslanmv.com/](https://ruslanmv.com/) for more information."""

symptoms_input = gr.Textbox(label="Symptoms")
question_input = gr.Textbox(label="Question")
answer_output = gr.Textbox(label="Answer")
iface = gr.Interface(
    fn=askme,
    inputs=[symptoms_input, question_input],
    outputs=answer_output,
    examples=examples,
    css=css,
    description=welcome_message  # Show the welcome message above the interface
)
iface.launch()
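
# Hedged note (assumption, not in the original): on a busy Space it is common to
# enable Gradio's request queue before launching, so concurrent users wait in line
# instead of timing out:
#
#     iface.queue(max_size=20)
#     iface.launch()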
# Alternative UI kept in the original as a commented-out string: a gr.Blocks layout
# (note that it never wires generate_button to askme).
'''
with gr.Blocks(css=css) as interface:
    gr.Markdown(welcome_message)  # Display the welcome message
    with gr.Row():
        with gr.Column():
            symptoms_input = gr.Textbox(label="Symptoms", placeholder="Enter symptoms here")
            question_input = gr.Textbox(label="Question", placeholder="Enter question here")
            generate_button = gr.Button("Ask Me", variant="primary")
    with gr.Row():
        answer_output = gr.Textbox(type="text", label="Answer")
interface.launch()
'''
# Second commented-out alternative: the simplest gr.Interface with bare text
# components and no custom CSS.
'''
iface = gr.Interface(
    fn=askme,
    inputs=["text", "text"],
    outputs="text",
    examples=examples,
    title="Medical AI Chatbot",
    description="Ask me a medical question!"
)
iface.launch()
'''
# Third commented-out alternative: labeled Textbox inputs plus the custom CSS,
# but without the welcome message.
'''
iface = gr.Interface(
    fn=askme,
    inputs=[
        gr.Textbox(label="Symptoms", placeholder="Enter symptoms here"),
        gr.Textbox(label="Question", placeholder="Enter question here")
    ],
    outputs="text",
    examples=examples,
    title="Medical AI Chatbot",
    description="Ask me a medical question!",
    css=css
)
iface.launch()
'''