from transformers import BioGptForCausalLM, BioGptTokenizer, pipeline, set_seed
from multilingual_translation import translate
from utils import lang_ids
import gradio as gr
model_list = [
    "microsoft/biogpt",
    "microsoft/BioGPT-Large-PubMedQA",
]
lang_list = list(lang_ids.keys())
def translate_to_english(text, base_lang):
    # Non-English prompts are pivoted through M2M100 so BioGPT always receives English input.
    if base_lang == "en":
        return text
    return translate("facebook/m2m100_418M", text, base_lang, "en")
def biogpt(
    prompt: str,
    model_id: str,
    max_length: int = 25,
    num_return_sequences: int = 5,
    base_lang: str = "en",
):
    en_prompt = translate_to_english(prompt, base_lang)
    # The model and tokenizer are reloaded on every call; acceptable for a demo Space,
    # though caching them would avoid repeated loads under heavier use.
    model = BioGptForCausalLM.from_pretrained(model_id)
    tokenizer = BioGptTokenizer.from_pretrained(model_id)
    generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
    set_seed(42)
    output = generator(
        en_prompt,
        max_length=int(max_length),
        num_return_sequences=int(num_return_sequences),
        do_sample=True,
    )
    # Join however many sequences were returned instead of assuming exactly five,
    # so the function does not break when num_return_sequences != 5.
    output_text = "\n\n".join(seq["generated_text"] for seq in output)
    return en_prompt, output_text
inputs = [
    gr.Textbox(value="COVID-19 is", lines=5, label="Prompt"),
    gr.Dropdown(model_list, value="microsoft/biogpt", label="Model ID"),
    gr.Slider(minimum=5, maximum=100, value=25, step=1, label="Max Length"),
    gr.Slider(minimum=1, maximum=10, value=5, step=1, label="Num Return Sequences"),
    gr.Dropdown(lang_list, value="en", label="Base Language"),
]
outputs = [
    gr.Textbox(label="Prompt"),
    gr.Textbox(label="Output"),
]
examples = [
    ["COVID-19 is", "microsoft/biogpt", 25, 5, "en"],
    ["Kanser", "microsoft/biogpt", 25, 5, "tr"],
]
title = "BioGPT: Generative Pre-trained Transformer for Biomedical Text Generation and Mining"
demo_app = gr.Interface(
    biogpt,
    inputs,
    outputs,
    title=title,
    examples=examples,
    cache_examples=True,
)
# queue() replaces the deprecated enable_queue launch flag.
demo_app.queue().launch(debug=True)
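
# Quick local sanity check (a minimal sketch; assumes the Space's utils.lang_ids and
# multilingual_translation.translate modules are importable in this environment).
# Kept commented out because launch() above blocks; uncomment and run without the UI
# to exercise biogpt() directly:
#
# if __name__ == "__main__":
#     prompt_back, generations = biogpt(
#         "COVID-19 is",
#         "microsoft/biogpt",
#         max_length=25,
#         num_return_sequences=2,
#         base_lang="en",
#     )
#     print(prompt_back)
#     print(generations)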