zcxu-eric committed on
Commit
a2e3a7d
2 Parent(s): 4c1d95e 6c1b85a

Merge branch 'main' of https://huggingface.co/spaces/zcxu-eric/magicanimate

Browse files
Files changed (1) hide show
  1. app.py +16 -5
app.py CHANGED
@@ -12,6 +12,7 @@ import argparse
12
  import imageio
13
  import numpy as np
14
  import gradio as gr
 
15
  from PIL import Image
16
  from subprocess import PIPE, run
17
 
@@ -23,9 +24,15 @@ snapshot_download(repo_id="runwayml/stable-diffusion-v1-5", local_dir="./stable-
23
  snapshot_download(repo_id="stabilityai/sd-vae-ft-mse", local_dir="./sd-vae-ft-mse")
24
  snapshot_download(repo_id="zcxu-eric/MagicAnimate", local_dir="./MagicAnimate")
25
 
 
 
 
 
 
 
26
  animator = MagicAnimate()
27
 
28
- def animate(reference_image, motion_sequence_state, seed, steps, guidance_scale):
29
  return animator(reference_image, motion_sequence_state, seed, steps, guidance_scale)
30
 
31
  with gr.Blocks() as demo:
@@ -70,13 +77,15 @@ with gr.Blocks() as demo:
70
  motion_sequence.upload(
71
  read_video,
72
  motion_sequence,
73
- motion_sequence
 
74
  )
75
  # when `first_frame` is updated
76
  reference_image.upload(
77
  read_image,
78
  reference_image,
79
- reference_image
 
80
  )
81
  # when the `submit` button is clicked
82
  submit.click(
@@ -88,6 +97,7 @@ with gr.Blocks() as demo:
88
  # Examples
89
  gr.Markdown("## Examples")
90
  gr.Examples(
 
91
  examples=[
92
  ["inputs/applications/source_image/monalisa.png", "inputs/applications/driving/densepose/running.mp4"],
93
  ["inputs/applications/source_image/126313770_0_final.png", "inputs/applications/driving/densepose/demo4.mp4"],
@@ -96,8 +106,9 @@ with gr.Blocks() as demo:
96
  ["inputs/applications/source_image/multi1_source.png", "inputs/applications/driving/densepose/multi_dancing.mp4"],
97
  ],
98
  inputs=[reference_image, motion_sequence],
99
- outputs=animation
 
100
  )
101
 
102
- demo.queue(max_size=100)
103
  demo.launch(share=True)
 
12
  import imageio
13
  import numpy as np
14
  import gradio as gr
15
+ import os
16
  from PIL import Image
17
  from subprocess import PIPE, run
18
 
 
24
  snapshot_download(repo_id="stabilityai/sd-vae-ft-mse", local_dir="./sd-vae-ft-mse")
25
  snapshot_download(repo_id="zcxu-eric/MagicAnimate", local_dir="./MagicAnimate")
26
 
27
+ is_spaces = True if "SPACE_ID" in os.environ else False
28
+ true_for_shared_ui = False #This will be true only if you are in a shared UI
29
+ if(is_spaces):
30
+ true_for_shared_ui = True if "zcxu-eric/magicanimate" in os.environ['SPACE_ID'] else False
31
+
32
+
33
  animator = MagicAnimate()
34
 
35
+ def animate(reference_image, motion_sequence_state, seed=1, steps=25, guidance_scale=7.5):
36
  return animator(reference_image, motion_sequence_state, seed, steps, guidance_scale)
37
 
38
  with gr.Blocks() as demo:
 
77
  motion_sequence.upload(
78
  read_video,
79
  motion_sequence,
80
+ motion_sequence,
81
+ queue=False
82
  )
83
  # when `first_frame` is updated
84
  reference_image.upload(
85
  read_image,
86
  reference_image,
87
+ reference_image,
88
+ queue=False
89
  )
90
  # when the `submit` button is clicked
91
  submit.click(
 
97
  # Examples
98
  gr.Markdown("## Examples")
99
  gr.Examples(
100
+ fn=animate,
101
  examples=[
102
  ["inputs/applications/source_image/monalisa.png", "inputs/applications/driving/densepose/running.mp4"],
103
  ["inputs/applications/source_image/126313770_0_final.png", "inputs/applications/driving/densepose/demo4.mp4"],
 
106
  ["inputs/applications/source_image/multi1_source.png", "inputs/applications/driving/densepose/multi_dancing.mp4"],
107
  ],
108
  inputs=[reference_image, motion_sequence],
109
+ outputs=animation,
110
+ cache_examples=true_for_shared_ui
111
  )
112
 
113
+ demo.queue(max_size=15)
114
  demo.launch(share=True)