# igea-qa / app.py
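# Gradio demo for Igea-1B-QA: Italian biomedical question answering with
# optional multiple-choice options and optional supporting context.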
import gradio as gr
from ctransformers import AutoModelForCausalLM
from transformers import AutoTokenizer, pipeline
import torch
import re
# Initialize the model
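# The GGUF-quantized weights are loaded through ctransformers; hf=True wraps the
# model so it can be used with the standard transformers text-generation pipeline.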
model = AutoModelForCausalLM.from_pretrained("bmi-labmedinfo/Igea-1B-QA-v0.1-GGUF", model_file="unsloth.Q4_K_M.gguf", model_type="mistral", hf=True)
tokenizer = AutoTokenizer.from_pretrained("bmi-labmedinfo/Igea-1B-QA-v0.1")
gen_pipeline = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer
)
# Define the function to generate text
def generate_text(question, opa, opb, opc, opd, context, temperature=1, max_new_tokens=30):
    # Keep only the non-empty options and format them as "A) ...; B) ...; C) ...; D) ..."
    options_filtered = [option for option in [opa, opb, opc, opd] if option is not None and len(option) > 0]
    options_string = "; ".join([["A) ", "B) ", "C) ", "D) "][i] + options_filtered[i] for i in range(len(options_filtered))]) + "."
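    # Three prompt templates: closed question with context, closed question without
    # context, and open-ended question. Which one is used depends on which optional
    # inputs the user filled in (see the branching below).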
    closed_prompt = """
### Contesto:
{}
### Domanda:
{}
### Opzioni:
{}
### Risposta:
"""
    closed_prompt_no_context = """
### Domanda:
{}
### Opzioni:
{}
### Risposta:
"""
    open_prompt = """
### Domanda:
{}
### Risposta:
"""
    # valid context, valid options
    if context is not None and len(context) > 1 and len(options_filtered) > 1:
        prompt = closed_prompt.format(context, question, options_string)
    # invalid context, valid options
    elif (context is None or len(context) < 1) and len(options_filtered) > 1:
        prompt = closed_prompt_no_context.format(question, options_string)
    # invalid context, invalid options
    else:
        prompt = open_prompt.format(question)
    print(prompt)
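    # Run generation; return_full_text=False keeps only the newly generated continuation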
    output = gen_pipeline(
        prompt,
        max_new_tokens=max_new_tokens,
        do_sample=True,  # sampling must be enabled for the temperature setting to take effect
        temperature=temperature,
        return_full_text=False
    )
    generated_text = output[0]['generated_text']
    # Echo the question and show the generated answer highlighted in blue
    return f"<span>{question} </span><b style='color: blue;'>{generated_text}</b>"
# Create the Gradio interface
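# Placeholders show an example Italian medical multiple-choice question with its
# answer options and source context.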
question = gr.Textbox(lines=1, placeholder="L'ostruzione uretrale cronica dovuta a iperplasia prostatica benigna può portare al seguente cambiamento nel parenchima renale", label="Domanda (Opzioni facoltative)")
opa = gr.Textbox(lines=1, placeholder="Iperplasia", label="A:")
opb = gr.Textbox(lines=1, placeholder="Ipertrofia", label="B:")
opc = gr.Textbox(lines=1, placeholder="Atrofia", label="C:")
opd = gr.Textbox(lines=1, placeholder="Displasia", label="D:")
context = gr.Textbox(lines=2, placeholder="L'ostruzione uretrale cronica dovuta a calcoli urinari, ipertrofia prostatica, tumori, gravidanza normale, tumori, prolasso uterino o disturbi funzionali causano idronefrosi che per definizione viene utilizzata per descrivere la dilatazione della pelvi renale e dei calcoli associati ad atrofia progressiva del rene dovuta a ostruzione dell'uretra. Deflusso di urina. Fare riferimento a Robbins 7yh/9,1012,9/e. P950.", label="Contesto (facoltativo)")
temperature = gr.Slider(minimum=0.1, maximum=2.0, value=1.0, step=0.1, label="Temperature")
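# Page layout: question with an options row and context inside a group, a
# collapsible accordion for advanced settings, and an HTML box for the answer.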
with gr.Blocks(css="#outbox { border-radius: 8px !important; border: 1px solid #e5e7eb !important; padding: 8px !important; text-align:center !important;}") as iface:
    gr.Markdown("# Igea Question Answering Interface ⚕️🩺")
    gr.Markdown("🐢💬 To guarantee reasonable throughput (<1 min to answer with default settings), this space employs a **GGUF quantized version of [Igea 1B](https://huggingface.co/bmi-labmedinfo/Igea-1B-v0.0.1)**, optimized for **hardware-limited, CPU-only machines** like the free-tier Hugging Face Space.")
    gr.Markdown("⚠️ Read the **[bias, risks and limitations](https://huggingface.co/bmi-labmedinfo/Igea-1B-v0.0.1#%F0%9F%9A%A8%E2%9A%A0%EF%B8%8F%F0%9F%9A%A8-bias-risks-and-limitations-%F0%9F%9A%A8%E2%9A%A0%EF%B8%8F%F0%9F%9A%A8)** of Igea before use!")
    with gr.Group():
        question.render()
        with gr.Row():
            opa.render()
            opb.render()
            opc.render()
            opd.render()
        context.render()
    with gr.Accordion("Advanced Options", open=False):
        temperature.render()
    output = gr.HTML(label="Answer", elem_id="outbox")
    btn = gr.Button("Answer")
    btn.click(generate_text, [question, opa, opb, opc, opd, context, temperature], output)
# Launch the interface
if __name__ == "__main__":
    iface.launch(inline=True)