Update app.py
app.py CHANGED
@@ -98,21 +98,38 @@ def run_xportrait(source_image, driving_video, seed, uc_scale, best_frame, out_f
         return f"An error occurred: {e}", None
 
 # Set up Gradio interface
-with gr.Blocks() as demo:
+css="""
+div#frames-gallery{
+    overflow: scroll!important;
+}
+"""
+with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
+        gr.Markdown("# X-Portrait: Expressive Portrait Animation with Hierarchical Motion Attention")
+        gr.HTML("""
+        <div style="display:flex;column-gap:4px;">
+            <a href='https://github.com/bytedance/X-Portrait'>
+                <img src='https://img.shields.io/badge/GitHub-Repo-blue'>
+            </a>
+            <a href='https://byteaigc.github.io/x-portrait/'>
+                <img src='https://img.shields.io/badge/Project-Page-green'>
+            </a>
+        </div>
+        """)
         with gr.Row():
             with gr.Column():
-                source_image = gr.Image(label="Source Image", type="filepath")
-                driving_video = gr.Video(label="Driving Video")
                 with gr.Row():
-                    seed = gr.Number(value=999, label="Seed")
-                    uc_scale = gr.Number(value=5, label="UC Scale")
+                    source_image = gr.Image(label="Source Image", type="filepath")
+                    driving_video = gr.Video(label="Driving Video")
                 with gr.Group():
                     with gr.Row():
                         best_frame = gr.Number(value=36, label="Best Frame")
                         out_frames = gr.Number(value=-1, label="Out Frames")
                 with gr.Accordion("Driving video Frames"):
-                    driving_frames = gr.Gallery(show_label=True, columns=6, height=512)
+                    driving_frames = gr.Gallery(show_label=True, columns=6, height=512, elem_id="frames-gallery")
+                with gr.Row():
+                    seed = gr.Number(value=999, label="Seed")
+                    uc_scale = gr.Number(value=5, label="UC Scale")
                 with gr.Row():
                     num_mix = gr.Number(value=4, label="Number of Mix")
                     ddim_steps = gr.Number(value=30, label="DDIM Steps")
@@ -120,6 +137,13 @@ with gr.Blocks() as demo:
             with gr.Column():
                 video_output = gr.Video(label="Output Video")
                 status = gr.Textbox(label="status")
+                gr.Examples(
+                    examples=[
+                        ["./assets/source_image.png", "./assets/driving_video.mp4"]
+                    ],
+                    inputs=[source_image, driving_video]
+                )
+
 
     driving_video.upload(
         fn = extract_frames_with_labels,