File size: 1,006 Bytes
76ceb1a
a57b498
76ceb1a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d0316ac
 
76ceb1a
 
d0316ac
 
 
 
 
 
 
 
 
 
4322c02
76ceb1a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import torch
import gradio as gr
from transformers import BioGptTokenizer, BioGptForCausalLM, set_seed

# Load the pretrained BioGPT tokenizer and causal-LM weights from the
# Hugging Face hub (downloaded and cached on first run).
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")
model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")

# Example prompt. NOTE(review): this module-level name is shadowed by the
# `sentence` parameter of get_beam_output below and is never used elsewhere
# in this script.
sentence = "COVID-19 is"


# Fix the RNG seed so sampling-dependent ops are reproducible across runs.
set_seed(42)

def get_beam_output(sentence):
    """Generate a beam-search continuation of *sentence* with BioGPT.

    Encodes the prompt with the module-level tokenizer, runs beam search
    (5 beams, between 100 and 1024 tokens, stopping early once all beams
    are finished) under ``torch.no_grad()``, and returns the decoded text
    with special tokens stripped.
    """
    encoded = tokenizer(sentence, return_tensors="pt")
    # Generation is inference-only; disable autograd to save memory.
    with torch.no_grad():
        generated = model.generate(
            **encoded,
            min_length=100,
            max_length=1024,
            num_beams=5,
            early_stopping=True,
        )
    # Single prompt in, single sequence out: decode the first (only) row.
    return tokenizer.decode(generated[0], skip_special_tokens=True)


# UI widgets: a small box for the prompt, a larger one for the generation.
txt1 = gr.Textbox(label="Input", lines=3)
txt2 = gr.Textbox(label="Output", lines=10)

# Wire the generator function to the two text boxes and start the app.
demo = gr.Interface(fn=get_beam_output, inputs=txt1, outputs=txt2)
demo.launch()