# app.py — BioGPT text-generation demo (Hugging Face Space by Harveenchadha)
import torch
import gradio as gr
from transformers import BioGptTokenizer, BioGptForCausalLM, set_seed
# Load the pretrained BioGPT tokenizer and causal-LM weights from the Hub.
# Both are created once at import time so every request reuses them.
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")
model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")
# Example prompt (unused by the Gradio handler, which receives user input).
sentence = "COVID-19 is"
# Fix the RNG seed for reproducible generation across runs.
set_seed(42)
def get_beam_output(sentence):
    """Generate a biomedical continuation of *sentence* with BioGPT.

    Runs beam search (5 beams, early stopping) to produce between 100 and
    1024 tokens, then decodes the best beam back to plain text.

    Args:
        sentence: Prompt string to continue.

    Returns:
        The generated text with special tokens stripped.
    """
    encoded = tokenizer(sentence, return_tensors="pt")
    # Inference only — no gradients needed, saves memory.
    with torch.no_grad():
        beams = model.generate(
            **encoded,
            min_length=100,
            max_length=1024,
            num_beams=5,
            early_stopping=True,
        )
    # Decode the top-scoring beam.
    return tokenizer.decode(beams[0], skip_special_tokens=True)
# Input box: the prompt the user wants BioGPT to continue.
txt1 = gr.Textbox(
label="Input",
lines=3,
)
# Output box: the generated continuation.
txt2 = gr.Textbox(
label="Output",
lines=10,
)
# Wire the generation function into a simple one-in / one-out interface
# and start the web server (blocks until the app is stopped).
demo = gr.Interface(fn=get_beam_output, inputs=txt1, outputs=txt2)
demo.launch()