osanseviero's picture
Change name so it's linked to the model repo
f0cd792
raw
history blame
991 Bytes
import os
import gradio as gr
import torch
from transformers import pipeline
print(f"Is CUDA available: {torch.cuda.is_available()}")
print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']]
pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0")
title = "BioGPT-Large Demo"
description = """
Check out the [BioGPT-Large model card](https://huggingface.co/microsoft/biogpt-large) for more info.
**Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes.
"""
def inference(text):
output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
return [
output_biogpt,
]
io = gr.Interface(
inference,
gr.Textbox(lines=3),
outputs=[
gr.Textbox(lines=3, label="BioGPT-Large"),
],
title=title,
description=description,
examples=examples
)
io.launch()