# BioGPT text-generation demo — Hugging Face Space "BioGPT" (app.py, by Harveenchadha).
import gradio as gr
import torch
from transformers import BioGptTokenizer, BioGptForCausalLM, set_seed
# Load the pretrained BioGPT tokenizer and causal-LM weights from the Hub
# (downloads on first run, then served from the local cache).
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")
model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")
# Example prompt; the Gradio interface supplies the actual user input at runtime.
sentence = "COVID-19 is"
# Fix the RNG seed so generations are reproducible across runs.
set_seed(42)
def get_beam_output(sentence):
    """Generate a biomedical continuation of *sentence* with BioGPT beam search.

    Args:
        sentence: Prompt string to continue.

    Returns:
        The decoded generated text with special tokens stripped.
    """
    inputs = tokenizer(sentence, return_tensors="pt")
    # Inference only — disable gradient tracking to save memory.
    with torch.no_grad():
        beam_output = model.generate(
            **inputs,
            min_length=100,
            max_length=1024,
            num_beams=5,
            early_stopping=True,
        )
    # BUG FIX: the original decoded the output but never returned it,
    # so the Gradio interface always displayed an empty result.
    return tokenizer.decode(beam_output[0], skip_special_tokens=True)
# Wire the generator into a simple text-in / text-out Gradio UI.
# `demo` is the conventional name Hugging Face Spaces looks for.
demo = gr.Interface(fn=get_beam_output, inputs="text", outputs="text")
# Start the local web server (blocks until shut down).
demo.launch()