import os
import base64

import numpy as np
import gradio as gr


def get_base64(bin_file):
    # Read a binary file and return its contents as a base64-encoded string.
    with open(bin_file, "rb") as f:
        data = f.read()
    return base64.b64encode(data).decode()


def conr_fn(character_sheets, pose_zip):
    # Clear any leftovers from a previous run.
    os.system("rm character_sheet/*")
    os.system("rm result/*")
    os.system("rm poses/*")

    # Save each uploaded character-sheet image as character_sheet/<i>.png.
    os.makedirs("character_sheet", exist_ok=True)
    for i, e in enumerate(character_sheets):
        e.seek(0)
        with open(f"character_sheet/{i}.png", "wb") as f:
            f.write(e.read())
        e.seek(0)

    # Save the uploaded pose archive and extract it into poses/.
    os.makedirs("poses", exist_ok=True)
    pose_zip.seek(0)
    with open("poses.zip", "wb") as f:
        f.write(pose_zip.read())
    os.system("unzip -d poses poses.zip")

    # Run CoNR inference; infer.sh is expected to write output.mp4.
    os.system("sh infer.sh")
    return "output.mp4"


with gr.Blocks() as ui:
    gr.Markdown("CoNR demo")
    gr.Markdown("Open In Colab [GitHub](https://github.com/megvii-research/CoNR/)")
    gr.Markdown("Unofficial demo for [CoNR](https://transpchan.github.io/live3d/).")

    with gr.Row():
        # with gr.Column():
        #     gr.Markdown("## Parse video")
        #     gr.Markdown("TBD")
        with gr.Column():
            gr.Markdown("## Animate character")
            gr.Markdown("Character sheet")
            character_sheets = gr.File(file_count="multiple")
            gr.Markdown("Pose zip")
            # Don't hack
            pose_video = gr.File(file_count="single")
            # os.system("sh download.sh")
            run = gr.Button("Run")
            video = gr.Video()
            run.click(fn=conr_fn, inputs=[character_sheets, pose_video], outputs=video)

    gr.Markdown("## Examples")
    sheets = "character_sheet_ponytail_example"
    gr.Examples(
        fn=conr_fn,
        inputs=[character_sheets, pose_video],
        outputs=video,
        examples=[[[os.path.join(sheets, x) for x in os.listdir(sheets)], "poses_template.zip"]],
        cache_examples=True,
        examples_per_page=1,
    )

# ui.launch()
demo = ui
demo.launch()