import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the GPT-2 model fine-tuned on Indonesian poetry and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("samroni/puisi_model_gpt2_small")
model = AutoModelForCausalLM.from_pretrained("samroni/puisi_model_gpt2_small")
def text_generation(input_text, seed):
    # Tokenize the prompt and fix the RNG seed so generation is reproducible.
    input_ids = tokenizer(input_text, return_tensors="pt").input_ids
    torch.manual_seed(seed)  # Max value: 18446744073709551615
    # Sample up to 100 tokens and decode the first (only) generated sequence.
    outputs = model.generate(input_ids, do_sample=True, max_length=100)
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_text
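
# Optional sanity check before launching the UI (a minimal sketch; the prompt
# below is only an illustrative example, not taken from the model card):
# print(text_generation("hujan turun di senja", seed=42))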
title = "Indonesian Poem Generator Demo (GPT-2)"
description = "Generate Indonesian poems (puisi) with a fine-tuned GPT-2 model."
gr.Interface(
    fn=text_generation,
    inputs=[
        gr.inputs.Textbox(lines=2, label="Enter input text"),
        gr.inputs.Number(default=10, label="Enter seed number"),
    ],
    outputs=[gr.outputs.Textbox(type="auto", label="Text Generated")],
    title=title,
    description=description,
    theme="huggingface",
).launch()