fffiloni committed
Commit 2ee4092
1 Parent(s): 783a96c

Update app.py

Files changed (1)
app.py: +15 -26
app.py CHANGED
@@ -44,22 +44,18 @@ example_video_dir = "assets/examples/driving"
 
 #################### interface logic ####################
 
-# Define components first
+with gr.Blocks(theme=gr.themes.Soft()) as demo:
 
-output_video1 = gr.Video()
-output_video_concat1 = gr.Video()
+    with gr.Column():
 
-with gr.Blocks(theme=gr.themes.Soft()) as demo:
-
-    with gr.Row():
-        # Examples
         gr.Markdown("""
-        ## 🤗 This is the gradio demo for LivePortrait for video.
+        ## 🤗 This is the gradio demo for Vid2Vid LivePortrait.
         Please upload or use a webcam to get a Source Portrait Video (any aspect ratio) and upload a Driving Video (1:1 aspect ratio, or any aspect ratio with do crop (driving video) checked).
         """)
-        # for video portrait
+
         with gr.Row():
-            with gr.Accordion(open=True, label="Video Portrait"):
+            with gr.Column():
+
                 source_video_input = gr.Video()
                 gr.Examples(
                     examples=[
@@ -72,7 +68,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                     inputs=[source_video_input],
                     cache_examples=False,
                 )
-            with gr.Accordion(open=True, label="Driving Video"):
+
                 video_input = gr.Video()
                 gr.Examples(
                     examples=[
@@ -85,26 +81,18 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                     inputs=[video_input],
                     cache_examples=False,
                 )
-        with gr.Row():
-            with gr.Accordion(open=False, label="source Animation Instructions and Options"):
                 gr.Markdown(load_description("assets/gradio_description_animation.md"))
                 with gr.Row():
                     flag_relative_input = gr.Checkbox(value=True, label="relative motion")
                     flag_do_crop_input = gr.Checkbox(value=True, label="do crop (source)")
                     flag_remap_input = gr.Checkbox(value=True, label="paste-back")
                     flag_crop_driving_video_input = gr.Checkbox(value=False, label="do crop (driving video)")
-        with gr.Row():
-            with gr.Column():
-                process_button_source_animation = gr.Button("🚀 Animate video", variant="primary")
-            with gr.Column():
-                process_button_reset = gr.ClearButton([source_video_input, video_input, output_video1, output_video_concat1], value="🧹 Clear")
-        with gr.Row():
-            with gr.Column():
-                with gr.Accordion(open=True, label="The animated video in the original image space"):
-                    output_video1.render()
+                with gr.Row():
+                    process_button_source_animation = gr.Button("🚀 Animate video", variant="primary")
+                    process_button_reset = gr.ClearButton([source_video_input, video_input, output_video1, output_video_concat1], value="🧹 Clear")
             with gr.Column():
-                with gr.Accordion(open=True, label="The animated video"):
-                    output_video_concat1.render()
+                output_video1 = gr.Video(label="The animated video in the original image space")
+                output_video_concat1 = gr.Video(label="The animated video")
 
     # binding functions for buttons
 
@@ -119,8 +107,9 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             flag_crop_driving_video_input
         ],
         outputs=[output_video1, output_video_concat1],
-        show_progress=True
+        show_progress=True,
+        show_api=False
     )
 
 
-demo.launch()
+demo.queue(max_size=10).launch(show_api=False)
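
For readers less familiar with Gradio, the two API idioms this commit leans on are: output components are now created in place with a `label` (instead of being pre-created at module level and `.render()`ed inside an `Accordion`), and the app now launches through `queue(max_size=10)` with the auto-generated API page hidden via `show_api=False`. Below is a minimal standalone sketch of those idioms, not the repo's code; the `flip` handler and the `Textbox` components are placeholders for illustration only.

# Minimal sketch of the Gradio idioms used by this commit (placeholder app,
# not the repo's code): in-place labeled output components, a ClearButton,
# and queue()/launch() with the API docs page hidden.
import gradio as gr

def flip(text):
    # hypothetical handler, only here to make the sketch runnable
    return text[::-1]

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    with gr.Row():
        with gr.Column():
            inp = gr.Textbox(label="input")
            btn = gr.Button("🚀 Run", variant="primary")
        with gr.Column():
            # component defined where it lives and labeled directly,
            # mirroring output_video1 / output_video_concat1 in the diff
            out = gr.Textbox(label="result")
    # ClearButton resets the listed components, like the "🧹 Clear" button above
    clear = gr.ClearButton([inp, out], value="🧹 Clear")
    # show_api=False on the event hides this endpoint from the API docs
    btn.click(fn=flip, inputs=[inp], outputs=[out], show_api=False)

# max_size=10 caps how many requests may wait in the queue;
# show_api=False on launch hides the auto-generated API page entirely
demo.queue(max_size=10).launch(show_api=False)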