# URL: https://huggingface.co/spaces/gradio/text_generation
# imports
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch
# loading the tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
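# NOTE (illustrative, not part of the original demo): GPT-J-6B is roughly 24 GB in
# float32, so when a GPU is available it is common to load it in half precision, e.g.:
#   model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B", torch_dtype=torch.float16).to("cuda")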
# building the text-generation pipeline once at startup so it is not rebuilt on every request
generation_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)

# defining the core function
def generate(text):
    result = generation_pipeline(text)
    return result[0]["generated_text"]
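# NOTE (illustrative, not part of the original demo): the pipeline call above uses the
# default generation settings. Sampling parameters can be forwarded to generate(), e.g.:
#   result = generation_pipeline(text, max_new_tokens=50, do_sample=True, temperature=0.9)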
# defining title, description and examples
title = "Text Generation with GPT-J-6B"
description = "This demo generates text using GPT-J 6B: a transformer model trained using Ben Wang's Mesh Transformer JAX."
examples = [
["The Moon's orbit around Earth has"],
["The smooth Borealis basin in the Northern Hemisphere covers 40%"],
]
# defining the interface
demo = gr.Interface(
fn=generate,
    inputs=gr.Textbox(lines=5, label="Input Text"),
    outputs=gr.Textbox(label="Generated Text"),
title=title,
description=description,
examples=examples,
)
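# NOTE (illustrative): launch() also accepts options such as share=True to expose a
# temporary public link, e.g. demo.launch(share=True); the defaults are used below.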
# launching
demo.launch()