# Poem / app.py
# Boyxavi's picture
# Update app.py
# 35352a6 verified
# raw
# history blame contribute delete
# 6.85 kB
# %% [code] {"execution":{"iopub.status.busy":"2024-12-20T13:00:27.695987Z","iopub.execute_input":"2024-12-20T13:00:27.696914Z","iopub.status.idle":"2024-12-20T13:00:42.851218Z","shell.execute_reply.started":"2024-12-20T13:00:27.696874Z","shell.execute_reply":"2024-12-20T13:00:42.850368Z"}}
# %% [code] {"execution":{"iopub.status.busy":"2024-12-20T12:56:25.928197Z","iopub.execute_input":"2024-12-20T12:56:25.928858Z","iopub.status.idle":"2024-12-20T13:00:06.533130Z","shell.execute_reply.started":"2024-12-20T12:56:25.928822Z","shell.execute_reply":"2024-12-20T13:00:06.532150Z"}}
from transformers import AutoModelForCausalLM, AutoTokenizer

# One-off smoke test: load Phi-3-mini on CPU and print a single sample poem.
# NOTE(review): this model is loaded again below via pipeline(); this section
# appears to be leftover notebook experimentation.
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-3-mini-4k-instruct",
    device_map="cpu",
    torch_dtype="auto",
    trust_remote_code=True
)
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-3-mini-4k-instruct")

# Test generation
prompt = "Compose a captivating poem in 8-9 lines about the beauty of structured poetry. Begin with a vivid image to draw the reader in, explore emotions and metaphors in the middle, and end with a resonant and thought-provoking conclusion. Use poetic devices like rhyme, alliteration, and rhythm to enhance the flow and make the poem memorable."

# Tokenize the input
input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to("cpu")
# Bug fix: temperature only takes effect when sampling is enabled; without
# do_sample=True, generate() runs greedy decoding and ignores temperature
# (transformers emits a warning about this).
outputs = model.generate(input_ids=input_ids, max_new_tokens=300, temperature=0.8, do_sample=True)
poem = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(poem)
# %% [code] {"execution":{"iopub.status.busy":"2024-12-20T13:21:41.328638Z","iopub.execute_input":"2024-12-20T13:21:41.329062Z","iopub.status.idle":"2024-12-20T13:21:51.115179Z","shell.execute_reply.started":"2024-12-20T13:21:41.329027Z","shell.execute_reply":"2024-12-20T13:21:51.114243Z"}}
import gradio as gr
from transformers import pipeline

# Build a text-generation pipeline: it bundles tokenisation, generation and
# decoding in one object, which keeps memory management simpler than holding
# the model and tokenizer separately.
_MODEL_ID = "microsoft/phi-3-mini-4k-instruct"
pipe = pipeline(
    "text-generation",
    model=_MODEL_ID,
    tokenizer=_MODEL_ID,
    device_map="auto",        # let Transformers pick the best available device
    torch_dtype="float16",    # half precision for faster inference
    trust_remote_code=True
)
# Predefined conversation responses, keyed by the lowercased, stripped user
# message (see generate_poem). The builder/creator biographies were previously
# pasted verbatim for every phrasing variant; they are now shared constants
# fanned out over the accepted phrasings.
_BUILDER_BIO = "This application was built by Adewuyi Ayomide, a passionate Machine Learning Engineer and Computer Science student at the University of Ibadan. He specializes in Natural Language Processing and has a keen interest in making AI more accessible and creative."
_CREATOR_BIO = "I was created by Adewuyi Ayomide, a talented Machine Learning Engineer and Computer Science student at the University of Ibadan. He developed me to help people explore the beauty of poetry through AI."

conversations = {
    phrasing: _BUILDER_BIO
    for phrasing in (
        "who built this",
        "who built this application",
        "who built this?",
        "who built this application?",
    )
}
conversations.update({
    phrasing: _CREATOR_BIO
    for phrasing in ("who created you", "who created you?")
})
conversations.update({
    "hey": "Hello! πŸ‘‹ I'm your AI poetry companion. Would you like me to create a poem for you?",
    "hi": "Hi there! πŸ‘‹ Ready to explore the world of poetry together?",
    "hello": "Hello! πŸ‘‹ I'm excited to create some poetry with you today!",
    "help": "I can help you create beautiful poems! Just share a topic, emotion, or idea, and I'll craft a unique poem for you. You can also ask me about who created me or just chat casually.",
    "wow": "Thank you! I'm glad you're impressed. Would you like me to create another poem for you? Just share any topic that interests you!",
    "amazing": "I'm delighted you think so! Would you like to explore more poetry together? Just give me a theme or emotion to work with!",
    "awesome": "Thank you for the kind words! I enjoy creating poems. What topic would you like me to write about next?",
    "beautiful": "I'm happy you enjoyed it! Poetry is a beautiful way to express emotions. Would you like another poem?",
    "nice": "Thank you! I'm here to create more poems whenever you're ready. Just share a topic with me!",
    "great": "I'm glad you liked it! Ready for another poetic journey? Just give me a theme to work with!",
    "good": "Thank you! I enjoy crafting poems. Would you like to try another topic?",
    "thank you": "You're welcome! It's my pleasure to create poems. Feel free to request another one whenever you'd like!",
    "thanks": "You're welcome! Ready for another poem whenever you are!"
})
def generate_poem(prompt, history=None):
    """Chat handler: answer canned small-talk or generate a poem via the pipeline.

    Parameters
    ----------
    prompt : str
        The user's message.
    history : list[tuple[str, str]] | None
        Running chat history as (speaker, text) pairs. A fresh list is created
        when None so no mutable default is shared between calls.

    Returns
    -------
    tuple
        (history, history) -- the same list twice, matching the Gradio
        ["chatbot", "state"] outputs.
    """
    if history is None:
        history = []  # never use a mutable default argument

    # Bug fix: the empty-input guard previously ran AFTER the canned-response
    # lookup, and its guidance message was mislabelled as spoken by "You".
    # Check first and attribute the message to the bot.
    if not prompt.strip():
        history.append(("AI", "Please provide a topic or idea for the poem."))
        return history, history

    # Canned responses for greetings and meta questions.
    prompt_lower = prompt.strip().lower()
    if prompt_lower in conversations:
        history.append(("You", prompt))
        history.append(("AI", conversations[prompt_lower]))
        return history, history

    try:
        # do_sample=True is required for temperature to take effect.
        outputs = pipe(prompt, max_new_tokens=500, temperature=0.7, do_sample=True)
        poem = outputs[0]['generated_text']  # extract generated text from pipeline output
    except Exception as e:
        # Surface pipeline failures in the chat instead of crashing the UI.
        poem = f"An error occurred: {e}"

    # Record the exchange.
    history.append(("You", prompt))
    history.append(("AI", poem))
    return history, history
# Wire generate_poem into a minimal Gradio chat UI. The "state" input/output
# pair carries the running conversation history between turns.
_ui_config = dict(
    fn=generate_poem,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
    title="Love Poem Generator Chatbot",
    description="Chat with the AI, and it will generate love poems for you!",
)
interface = gr.Interface(**_ui_config)

# share=True publishes a temporary public link in addition to the local URL.
interface.launch(share=True)