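"""Gradio Space that compares `hf_hub_download` speeds for a given repo.

The file `pytorch_model.bin` is downloaded three times into a temporary
cache, each time with different settings:
  * very slow: HF_TRANSFER unset, HF_CHUNK_SIZE=1024 (1 KiB chunks)
  * slow:      HF_TRANSFER unset, HF_CHUNK_SIZE=10485760 (10 MiB chunks)
  * fast:      HF_TRANSFER=1
The elapsed wall-clock time of each run is displayed.
"""
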
import os
import gradio as gr
import datetime
import tempfile
from huggingface_hub import hf_hub_download


def download_very_slow(repo_id):
    # Disable the hf_transfer backend and force tiny 1 KiB chunks.
    os.environ.pop("HF_TRANSFER", None)
    os.environ["HF_CHUNK_SIZE"] = "1024"
    with tempfile.TemporaryDirectory() as workdir:
        hf_hub_download(
            repo_id,
            filename="pytorch_model.bin",
            force_download=True,
            cache_dir=workdir,
        )


def download_slow(repo_id):
    # Disable the hf_transfer backend, use 10 MiB chunks.
    os.environ.pop("HF_TRANSFER", None)
    os.environ["HF_CHUNK_SIZE"] = "10485760"
    with tempfile.TemporaryDirectory() as workdir:
        hf_hub_download(
            repo_id,
            filename="pytorch_model.bin",
            force_download=True,
            cache_dir=workdir,
        )


def download_fast(repo_id):
    # Re-enable the hf_transfer backend via its env var.
    os.environ["HF_TRANSFER"] = "1"
    with tempfile.TemporaryDirectory() as workdir:
        hf_hub_download(
            repo_id,
            filename="pytorch_model.bin",
            force_download=True,
            cache_dir=workdir,
        )


def download(repo_id):
    # Time each download mode and report the elapsed durations.
    start = datetime.datetime.now()
    download_very_slow(repo_id)
    taken_very_slow = datetime.datetime.now() - start

    start = datetime.datetime.now()
    download_slow(repo_id)
    taken_slow = datetime.datetime.now() - start

    start = datetime.datetime.now()
    download_fast(repo_id)
    taken_fast = datetime.datetime.now() - start

    return f"""
Very slow : {taken_very_slow}
Slow : {taken_slow}
Fast : {taken_fast}
"""


examples = ["gpt2", "openai/whisper-large-v2"]

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            inputs = gr.Textbox(
                label="Repo id",
                value="gpt2",  # should be set to " " when plugged into a real API
            )
            submit = gr.Button("Submit")
        with gr.Column():
            outputs = gr.Textbox(
                label="Download speeds",
            )
    with gr.Row():
        gr.Examples(examples=examples, inputs=[inputs])
    submit.click(
        download,
        inputs=[inputs],
        outputs=[outputs],
    )

demo.launch()