import os
import io
import base64

import gradio as gr
from dotenv import load_dotenv
from stability_sdk.api import Context
from stability_sdk.animation import AnimationArgs, Animator

load_dotenv(".env")

STABILITY_HOST = "grpc.stability.ai:443"
# STABILITY_KEY = os.getenv("STABILITY_KEY")


def anim(f_prompt, s_prompt, stability_key):
    # Connect to the Stability API
    context = Context(STABILITY_HOST, stability_key)

    # Test the connection
    context.get_user_info()
    print("Connection successful!")

    # Configure the animation
    args = AnimationArgs()
    args.interpolate_prompts = True
    args.locked_seed = True
    args.max_frames = 20
    args.seed = 42
    args.strength_curve = "0:(0)"
    args.diffusion_cadence_curve = "0:(4)"
    args.cadence_interp = "film"

    # Keyframed prompts: start from f_prompt and interpolate to s_prompt at frame 10
    animation_prompts = {
        0: f_prompt,
        10: s_prompt,
    }
    negative_prompt = ""

    # Create an Animator object to orchestrate the rendering
    animator = Animator(
        api_context=context,
        animation_prompts=animation_prompts,
        negative_prompt=negative_prompt,
        args=args,
    )

    # Render each frame of the animation
    for idx, frame in enumerate(animator.render()):
        # Convert the PIL image to a base64-encoded PNG
        buffered = io.BytesIO()
        frame.save(buffered, format="PNG")
        img_str = base64.b64encode(buffered.getvalue()).decode()
        yield img_str  # Yield the base64 string of the generated frame


with gr.Blocks() as demo:
    gr.Markdown("Stability Animation")
    f_prompt = gr.Textbox(label="First Prompt", value="a photo of a cute cat")
    s_prompt = gr.Textbox(label="Second Prompt", value="a photo of a cute dog")
    stability_key = gr.Textbox(label="Stability Key", value="")
    # Each yielded value is a base64-encoded PNG string, so stream it into a
    # Textbox (mainly useful to clients calling the "AnimAPI" endpoint)
    out_frame = gr.Textbox(label="Generated Frame (base64 PNG)")
    btn = gr.Button("Anim")
    btn.click(
        fn=anim,
        inputs=[f_prompt, s_prompt, stability_key],
        outputs=[out_frame],
        api_name="AnimAPI",
    )

# anim() is a generator, so enable the queue to stream intermediate frames
demo.queue().launch()
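
# ---------------------------------------------------------------------------
# Example (not part of the app; a minimal sketch under stated assumptions):
# because anim() yields base64-encoded PNGs, the generator can also be
# consumed directly in Python, without the Gradio UI, to write the rendered
# frames to disk. The prompts and the "sk-..." key below are placeholders,
# not values from the original script.
#
# for i, b64_png in enumerate(anim("a photo of a cute cat",
#                                  "a photo of a cute dog",
#                                  "sk-...")):
#     with open(f"frame_{i:03d}.png", "wb") as fh:
#         fh.write(base64.b64decode(b64_png))
# ---------------------------------------------------------------------------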