ecarbo's picture
Add min_length=35
a379343
raw
history blame
1.01 kB
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import gradio as gr
# Spanish GPT-2 ("gpt2-base-bne", trained on the BNE corpus, published by
# PlanTL-GOB-ES). Both tokenizer and weights are fetched from the Hugging Face
# Hub at import time, so first startup needs network access / a warm cache.
tokenizer = AutoTokenizer.from_pretrained("PlanTL-GOB-ES/gpt2-base-bne")
model = AutoModelForCausalLM.from_pretrained("PlanTL-GOB-ES/gpt2-base-bne")
def text_generation(input_text, seed):
    """Generate a Spanish continuation of *input_text* with the GPT-2 model.

    Args:
        input_text: Prompt string to continue.
        seed: RNG seed for reproducible sampling. Gradio's Number component
            delivers a float, so it is cast to int before seeding torch.

    Returns:
        The generated text as a single plain string.
    """
    input_ids = tokenizer(input_text, return_tensors="pt").input_ids
    # torch.manual_seed requires an integer (max value 18446744073709551615);
    # passing the float that gr.Number produces raises TypeError, so cast.
    torch.manual_seed(int(seed))
    outputs = model.generate(input_ids, do_sample=True, min_length=35, max_length=100)
    # batch_decode returns a list of strings; with a single prompt, return the
    # first entry so the UI shows plain text instead of a Python list repr.
    generated_text = tokenizer.batch_decode(outputs, skip_special_tokens=True)
    return generated_text[0]
# UI strings. The original file had mojibake ("Espa帽ol", "Aplicaci贸n") from
# UTF-8 bytes decoded as GBK; restored to the intended Spanish characters.
title = "Demo Generador de Texto Español GPT2"
description = "Aplicación para generar texto por ecarbo"

# Build and launch the demo. Uses top-level gr.Textbox/gr.Number components:
# the gr.inputs/gr.outputs namespaces and theme="huggingface" were removed in
# gradio 3.x/4.x and crash on current versions.
gr.Interface(
    fn=text_generation,
    inputs=[
        gr.Textbox(lines=2, label="Ingrese el texto"),
        # precision=0 makes the component deliver an integer seed.
        gr.Number(value=10, precision=0, label="Ingrese numero semilla"),
    ],
    outputs=gr.Textbox(label="Texto Generado"),
    title=title,
    description=description,
).launch()