from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
import gradio as gr


def get_pipe(name):
    # Build a text2text pipeline for the given model name. The
    # "summarization" task is used, so each generation is returned
    # under the "summary_text" key.
    tokenizer = AutoTokenizer.from_pretrained(name)
    model = AutoModelForSeq2SeqLM.from_pretrained(name)
    pipe = pipeline(
        "summarization", model=model, tokenizer=tokenizer, framework="pt"
    )
    return pipe
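
# Note (assumption, not in the original): the pipeline above loads the model
# on CPU by default; if a GPU were available, passing e.g. device=0 to
# pipeline(...) would place generation on it.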
model_names = ['bigscience/T0_3B'] #, 'bigscience/T0p', 'bigscience/T0pp']
#model_names = ['bigscience/T0_3B','bigscience/T0'] #, 'bigscience/T0p', 'bigscience/T0pp']
pipes = [get_pipe(name) for name in model_names]
def _fn(text, do_sample, min_length, max_length, temperature, top_p, pipe):
    # Generate from a single pipeline using the sampling settings
    # chosen in the UI.
    out = pipe(
        text,
        do_sample=do_sample,
        min_length=min_length,
        max_length=max_length,
        temperature=temperature,
        top_p=top_p,
        truncation=True,
    )
    return out[0]["summary_text"]


def fn(*args):
    # Run the same input through every loaded model, one output per model.
    return [_fn(*args, pipe=pipe) for pipe in pipes]
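
# Example (sketch): fn can also be called directly with the same positional
# arguments the UI passes, e.g. fn("Translate to French: Hello", True, 1, 64, 1.0, 1.0),
# and returns one generated string per entry in model_names.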

interface = gr.Interface(
    fn,
    inputs=[
        gr.inputs.Textbox(lines=10, label="input text"),
        gr.inputs.Checkbox(label="do_sample", default=True),
        gr.inputs.Slider(1, 128, step=1, default=64, label="min_length"),
        gr.inputs.Slider(1, 128, step=1, default=64, label="max_length"),
        gr.inputs.Slider(0.0, 1.0, step=0.1, default=1, label="temperature"),
        gr.inputs.Slider(0.0, 1.0, step=0.1, default=1, label="top_p"),
    ],
    outputs=[
        gr.outputs.Textbox(label=f"output by {name}") for name in model_names
    ],
    # examples=[[ex] for ex in examples],
    title="T0 playground",
    description="""
    This is a playground for playing around with T0 models.
    See https://huggingface.co/bigscience/T0 for more details
    """,
)

interface.launch()