"""Gradio app that displays Rerun (.rrd) recordings with the gradio_rerun viewer component.

A recording can be supplied as one or more uploaded .rrd files, as a URL, or via the
bundled example recordings.
"""

import gradio as gr
from gradio_huggingfacehub_search import HuggingfaceHubSearch
from gradio_rerun import Rerun


def predict(search_in: str, file_path: str | list[str], url: str):
    # The Rerun component accepts either a URL to an .rrd recording or local file paths.
    # A URL takes priority over uploaded files; the search box is wired up as an event
    # source, but its value is not used here.
    if url:
        return url
    return file_path


with gr.Blocks(css=".gradio-container { max-width: unset!important; }") as demo:
    with gr.Row():
        with gr.Column():
            search_in = HuggingfaceHubSearch(
                label="Search Models on Huggingface Hub and convert to rrd",
                placeholder="Search for models on Huggingface",
                search_type="model",
            )
            with gr.Group():
                file_path = gr.File(file_count="multiple", type="filepath")
                url = gr.Text(
                    info="Or use a URL",
                    label="URL",
                )
        with gr.Column():
            # Empty spacer column to balance the layout.
            pass
        btn = gr.Button("Run", scale=0)

    with gr.Row():
        rerun_viewer = Rerun(height=900)

    inputs = [search_in, file_path, url]
    outputs = [rerun_viewer]

    # Run the prediction when the button is clicked, a search is submitted,
    # or files are uploaded.
    gr.on(
        [btn.click, search_in.submit, file_path.upload],
        fn=predict,
        inputs=inputs,
        outputs=outputs,
    )

    gr.Examples(
        examples=[
            [None, ["./examples/rgbd.rrd"], None],
            [None, ["./examples/rrt-star.rrd"], None],
            [None, ["./examples/structure_from_motion.rrd"], None],
            [
                None,
                ["./examples/structure_from_motion.rrd", "./examples/rrt-star.rrd"],
                None,
            ],
        ],
        fn=predict,
        inputs=inputs,
        outputs=outputs,
        run_on_click=True,
    )

if __name__ == "__main__":
    demo.launch()