import gradio as gr
from transformers import pipeline
# Ensure the correct Keras package is used
import tensorflow as tf
import tf_keras
# Define the model and the text generation function
fine_tuned_model = "Amitesh007/text_generation-finetuned-gpt2"
generator = pipeline('text-generation', model=fine_tuned_model)
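# Each pipeline call returns a list of dicts (one per returned sequence), each with a "generated_text" key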
def generate(text):
    # Produce two continuations of the prompt; max_length caps total tokens (prompt + generation)
    results = generator(text, num_return_sequences=2, max_length=100)
    return results[0]["generated_text"], results[1]["generated_text"]
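# Example (hypothetical prompt): generate("Winter is coming") returns a tuple of two generated strings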
# Create the Gradio Blocks interface
with gr.Blocks() as demo:
gr.Markdown("# Text Generator GPT2 Pipeline")
gr.Markdown("This is a fine-tuned base GPT2 model inference, trained on a small 'Game of Thrones' dataset.")
with gr.Row():
with gr.Column():
input_text = gr.Textbox(lines=5, label="Input Text here....", placeholder="Type a sentence to start generating text")
generate_button = gr.Button("Generate")
with gr.Column():
output_text1 = gr.Textbox(label="Generated Text 1")
output_text2 = gr.Textbox(label="Generated Text 2")
    examples = gr.Examples(
        examples=[["A light snow had fallen the night before, and there were"],
                  ["The pig face had been smashed in with a mace, but Tyrion"]],
        inputs=input_text
    )
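    # concurrency_limit caps this click event at two simultaneous runs (Gradio 4.x event parameter)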
    generate_button.click(fn=generate, inputs=input_text, outputs=[output_text1, output_text2], concurrency_limit=2)
# Launch the interface with the max_threads parameter to control the total number of workers
demo.launch(max_threads=8)
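# Assumed dependencies for this Space (normally pinned in requirements.txt): gradio, transformers, tensorflow, tf-keras.
# Run locally with `python app.py`; the model weights are downloaded from the Hugging Face Hub on first use.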