import gradio as gr
from transformers import pipeline

# Function to generate the story
def generate_story(title, model_name):
    # Build a Hugging Face text-generation pipeline for the selected model
    # (note: the pipeline is rebuilt on every call, so large models reload each time)
    generator = pipeline('text-generation', model=model_name)

    # Generate the story based on the input title
    story = generator(
        title,
        max_length=230,          # Cap the generated story at 230 tokens
        do_sample=True,          # Enable sampling so temperature and top_p actually take effect
        no_repeat_ngram_size=3,  # Never repeat any 3-token sequence (reduces repetitive text)
        temperature=0.8,         # Higher values make the output more random, lower more deterministic
        top_p=0.95               # Nucleus (top-p) sampling: keep only the top 95% of probability mass
    )

    # Return only the generated text
    return story[0]['generated_text']
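
# Example (hypothetical) direct call for quick local testing, bypassing the Gradio UI;
# the title here is made up, and 'gpt2' is one of the models offered in the dropdown below:
#   print(generate_story("The Lost City", "gpt2"))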

# Create the Gradio interface using gr.Interface
demo = gr.Interface(
    fn=generate_story,  # The function to run
    inputs=[            # Inputs for the interface
        gr.Textbox(label="Enter Story Title", placeholder="Type a title here..."),  # Title input
        gr.Dropdown(
            choices=['gpt2', 'gpt2-large', 'EleutherAI/gpt-neo-2.7B', 'EleutherAI/gpt-j-6B',
                     'maldv/badger-writer-llama-3-8b', 'EleutherAI/gpt-neo-1.3B'],
            value='gpt2',
            label="Choose Model"
        )  # Model selection input
    ],
    outputs=gr.Textbox(label="Generated Story", lines=10),  # Output for the generated story
    title="AI Story Generator",                             # Title of the interface
    description="Enter a title and choose a model to generate a short story."  # Short description
)

# Launch the interface
demo.launch(share=True)
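
# Note: when this file is run locally (e.g. `python app.py`), share=True also creates a
# temporary public *.gradio.live link; on Hugging Face Spaces the app is hosted automatically
# and the share flag is not needed.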