File size: 1,357 Bytes
390bc59
56c6169
390bc59
 
 
 
 
 
 
dfb2a73
390bc59
 
71b80db
56c6169
839fe82
56c6169
6854f2d
 
37f9e41
6854f2d
a82049a
 
1b1c078
a82049a
1b1c078
a82049a
1b1c078
a82049a
1b1c078
 
a82049a
 
 
 
 
71b80db
a82049a
 
98c17e1
 
 
7e2465f
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import gradio as gr
from gradio import mix

# UI text shown on the Gradio page: app title and short usage blurb.
title = "GPT2"
description = "Gradio Demo for OpenAI GPT2. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."

# Footer HTML linking to the GPT-2 paper ("Language Models are Unsupervised Multitask Learners").
article = "<p style='text-align: center'><a href='https://d4mucfpksywv.cloudfront.net/better-language-models/language_models_are_unsupervised_multitask_learners.pdf' target='_blank'>Language Models are Unsupervised Multitask Learners</a></p>"

# Clickable example rows: [input text, model-dropdown value].
examples = [
    ['Paris is the capital of',"gpt2-medium"]
]

# Pre-load one Gradio interface per GPT-2 variant from the Hugging Face hub.
# NOTE(review): these run at import time and may hit the network — verify
# hub availability is acceptable at startup.
io1 = gr.Interface.load("huggingface/distilgpt2")

io2 = gr.Interface.load("huggingface/gpt2-large")

io3 = gr.Interface.load("huggingface/gpt2-medium")

io4 = gr.Interface.load("huggingface/gpt2-xl")

def inference(text, model):
    """Run *text* through the GPT-2 variant named by *model*.

    Any model name not in the dispatch table falls back to the
    distilgpt2 interface (io1), matching the dropdown's choices.
    """
    # Map dropdown value -> pre-loaded hub interface; distilgpt2 is the default.
    interfaces = {
        "gpt2-large": io2,
        "gpt2-medium": io3,
        "gpt2-xl": io4,
    }
    selected = interfaces.get(model, io1)
    return selected(text)

# Build the demo UI (text input + model dropdown -> text output) and start the
# web server with request queuing enabled.
# NOTE(review): `gr.inputs` / `gr.outputs` and `enable_queue=` are the legacy
# Gradio 2.x API — confirm the pinned gradio version before upgrading.
gr.Interface(
    inference, 
    [gr.inputs.Textbox(label="Input"),gr.inputs.Dropdown(choices=["distilgpt2","gpt2-medium","gpt2-large","gpt2-xl"], type="value", default="gpt2-medium", label="model")
], 
    gr.outputs.Textbox(label="Output"),
    examples=examples,
    article=article,
    title=title,
    description=description).launch(enable_queue=True)