import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
import torch
import spaces  # provides the @spaces.GPU decorator used on ZeroGPU Spaces
import os
IS_SPACES_ZERO = os.environ.get("SPACES_ZERO_GPU", "0") == "1"  # running on ZeroGPU hardware?
IS_SPACE = os.environ.get("SPACE_ID", None) is not None  # running inside a Hugging Face Space?

device = "cuda" if torch.cuda.is_available() else "cpu"
LOW_MEMORY = os.getenv("LOW_MEMORY", "0") == "1"
print(f"Using device: {device}")
print(f"low memory: {LOW_MEMORY}")
model_name = "ruslanmv/Medical-Llama3-8B"
# Load the model and tokenizer, then move the model to the selected device
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
tokenizer.pad_token = tokenizer.eos_token
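
# Note (assumption, not part of the original app): BitsAndBytesConfig is imported and LOW_MEMORY is
# defined above, but neither is used. A minimal sketch of how 4-bit loading could be wired in:
# if LOW_MEMORY:
#     bnb_config = BitsAndBytesConfig(load_in_4bit=True, bnb_4bit_compute_dtype=torch.float16)
#     model = AutoModelForCausalLM.from_pretrained(
#         model_name, quantization_config=bnb_config, device_map="auto"
#     )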
@spaces.GPU
def askme(symptoms, question):
    # Build the chat messages; add_generation_prompt below appends the assistant turn,
    # so no placeholder assistant message is needed here.
    template = [
        {"role": "system", "content": "You are an AI Medical Assistant trained on a vast dataset of health information. Please be thorough and provide an informative answer. If you don't know the answer to a specific medical inquiry, advise seeking professional help."},
        {"role": "user", "content": f"Symptoms: {symptoms}\nQuestion: {question}"},
    ]

    prompt = tokenizer.apply_chat_template(template, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_new_tokens=300, use_cache=True)
    # Decode only the newly generated tokens so the prompt is not echoed back in the answer
    generated_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    response_text = tokenizer.decode(generated_tokens, skip_special_tokens=True).strip()
    return response_text
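
# Quick local check (an assumption, not part of the original app): call askme() directly
# to exercise the model outside the Gradio UI.
# print(askme("sore throat", "Should I see a doctor?"))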


examples = [
    ["headache", "What are the possible causes of a headache?"],
    ["fever", "How can I treat a fever at home?"],
    ["cough", "What are the symptoms of a cough?"],
    ["chest pain", "What are the possible causes of chest pain?"],
]

# Longer worked example, appended to the quick examples above
symptoms = '''\
I'm a 35-year-old male and for the past few months, I've been experiencing fatigue,
increased sensitivity to cold, and dry, itchy skin.
'''
question = '''\
Could these symptoms be related to hypothyroidism?
If so, what steps should I take to get a proper diagnosis and discuss treatment options?
'''
examples.append([symptoms, question])

css = """
/* General Container Styles */
.gradio-container {
    font-family: "IBM Plex Sans", sans-serif;
    position: fixed; /* Ensure full-screen coverage */
    top: 0;
    left: 0;
    width: 100vw;  /* Set width to 100% viewport width */
    height: 100vh; /* Set height to 100% viewport height */
    margin: 0;    /* Remove margins for full-screen effect */
    padding: 0;    /* Remove padding for the full-screen effect */
    background-color: #212529; /* Dark background color */
    color: #fff;    /* Light text color for better readability */
    overflow: hidden; /* Hide potential overflow content */
    background-image: url("https://huggingface.co/spaces/ruslanmv/AI-Medical-Chatbot/resolve/main/notebook/local/img/background.jpg"); /* Replace with your image path */
    background-size: cover; /* Stretch the image to cover the container */
    background-position: center; /* Center the image horizontally and vertically */
}

/* Button Styles */
.gr-button {
    color: white;
    background: #007bff; /* Use a primary color for the background */
    white-space: nowrap;
    border: none;
    padding: 10px 20px;
    border-radius: 8px;
    cursor: pointer;
    transition: background-color 0.3s, color 0.3s;
}
.gr-button:hover {
    background-color: #0056b3; /* Darken the background color on hover */
}

/* Output box styles */
.gradio-textbox {
    background-color: #343a40; /* Dark background color */
    color: #fff; /* Light text color for better readability */
    border-color: #343a40; /* Dark border color */
    border-radius: 8px;
}
"""

welcome_message = """# AI Medical Llama 3 Chatbot
Describe your symptoms, ask any medical question, and get an answer from our AI Medical Llama 3 Chatbot.
Developed by Ruslan Magana. Visit [https://ruslanmv.com/](https://ruslanmv.com/) for more information."""

symptoms_input = gr.Textbox(label="Symptoms")
question_input = gr.Textbox(label="Question")
answer_output = gr.Textbox(label="Answer")


iface = gr.Interface(
    fn=askme,
    inputs=[symptoms_input, question_input],
    outputs=answer_output,
    examples=examples,
    css=css,
    description=welcome_message  # shown above the input fields
)

iface.launch()
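
# The triple-quoted string literals below are alternative UI layouts kept for reference;
# none of them are executed.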

'''

with gr.Blocks(css=css) as interface:
    gr.Markdown(welcome_message)  # Display the welcome message

    with gr.Row():
        with gr.Column():
            symptoms_input = gr.Textbox(label="Symptoms", placeholder="Enter symptoms here")
            question_input = gr.Textbox(label="Question", placeholder="Enter question here")
            generate_button = gr.Button("Ask Me", variant="primary")

    with gr.Row():
        answer_output = gr.Textbox(type="text", label="Answer")

interface.launch()
'''


'''
iface = gr.Interface(
    fn=askme,
    inputs=["text", "text"],
    outputs="text",
    examples=examples,
    title="Medical AI Chatbot",
    description="Ask me a medical question!"
)

iface.launch()
'''

'''

iface = gr.Interface(
    fn=askme,
    inputs=[
        gr.Textbox(label="Symptoms", placeholder="Enter symptoms here"),
        gr.Textbox(label="Question", placeholder="Enter question here")
    ],
    outputs="text",
    examples=examples,
    title="Medical AI Chatbot",
    description="Ask me a medical question!",
    css=css
)

iface.launch()
'''