File size: 1,467 Bytes
d847df1
 
013fbe9
 
7caf466
013fbe9
5fee423
78cf1f0
013fbe9
5fee423
 
7caf466
 
5fee423
013fbe9
 
 
5fee423
013fbe9
5fee423
013fbe9
5b8a5ea
5fee423
013fbe9
 
 
 
 
 
 
 
5fee423
7caf466
 
013fbe9
 
 
 
 
 
d499041
 
5fee423
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
#TODO: set seed and argument for it
#TODO: use transformer library directly
import gradio as gr
from gradio import mix
from transformers import pipeline, set_seed

#title = "trustworthy artificial intelligence workshop - content generator"
# UI description (HTML is rendered by gradio); currently unused — the
# description= kwarg is commented out in the Interface call below.
description = "based on the gpt2 demo interface by <a href='https://huggingface.co/spaces/docs-demos/gpt2/tree/main'>ahsen khaliq</a>"

#io1 = gr.Interface.load("huggingface/distilgpt2")

# Local text-generation pipeline; downloads the base "gpt2" checkpoint on
# first run. set_seed(42) makes transformers' sampling deterministic
# (addresses the seed TODO at the top of the file, though the seed is not
# yet exposed as an argument).
generator = pipeline('text-generation', model='gpt2')
set_seed(42)


# Remote hosted-inference interface for gpt2-large; loaded but currently
# unused (inference() below calls the local `generator` pipeline instead).
io2 = gr.Interface.load("huggingface/gpt2-large")

#io3 = gr.Interface.load("huggingface/gpt2-medium")

#io4 = gr.Interface.load("huggingface/gpt2-xl")

def inference(text, max_length=30, num_return_sequences=5):
    """Generate GPT-2 continuations of *text* with the module-level pipeline.

    Parameters
    ----------
    text : str
        Prompt to continue (the value of the gradio Textbox input).
    max_length : int, optional
        Maximum total token length of each generated sequence.
        Defaults to 30, the value previously hard-coded here.
    num_return_sequences : int, optional
        Number of alternative continuations to sample. Defaults to 5,
        the value previously hard-coded here.

    Returns
    -------
    list[dict]
        Raw pipeline output: one ``{"generated_text": ...}`` dict per
        sequence. Gradio stringifies this for the Textbox output.
    """
    # Previously this chose between several remote io* interfaces by model
    # name; that dead branch was removed in favor of the local pipeline.
    return generator(
        text,
        max_length=max_length,
        num_return_sequences=num_return_sequences,
    )

# Build and launch the web UI: one Textbox in, one Textbox out, backed by
# inference(). launch() blocks, serving requests until interrupted.
# NOTE(review): cache_examples=True with no examples= supplied looks like a
# leftover — confirm it has any effect without examples.
gr.Interface(
    inference, 
    [gr.inputs.Textbox(label="Input", placeholder="trustworthy artificial intelligence")],
    #,gr.inputs.Dropdown(choices=["distilgpt2","gpt2-medium","gpt2-large","gpt2-xl"], type="value", default="gpt2-medium", label="model")], 
    gr.outputs.Textbox(label="gpt-2 proposal"),
    #title=title,
    #description=description,
    cache_examples=True).launch(enable_queue=True)

#TODO: add credits at bottom