from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import gradio as gr

# Load the DistilGPT-2 tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
model = AutoModelForCausalLM.from_pretrained("distilgpt2")

# Build a text-generation pipeline from the loaded model and tokenizer
gen_pipeline = pipeline(task="text-generation", model=model, tokenizer=tokenizer)


def get_generated_text(prompt):
    """Return the generated continuation for the given prompt."""
    return gen_pipeline(prompt)[0]["generated_text"]


# Gradio interface: a prompt text box in, the generated text out
demo = gr.Interface(
    fn=get_generated_text,
    inputs=gr.Textbox(label="Enter a series of text and generate more", lines=2),
    outputs=gr.Textbox(label="Generated text"),
)

demo.launch()