|
import csv |
|
from datetime import datetime |
|
import os |
|
from typing import Optional |
|
import gradio as gr |
|
|
|
from huggingface_hub import HfApi, Repository |
|
|
|
# Hub access token read from the environment; None when the variable is unset
# (e.g. when running outside a configured Space).
HF_TOKEN = os.environ.get("HF_TOKEN")

# NOTE(review): module-level handle for a local clone of a Hub repo. It is
# never assigned or read in this visible chunk — presumably populated by code
# elsewhere in the file; confirm before removing.
repo: Optional[Repository] = None
|
|
|
|
|
|
|
# User-facing instructions rendered as Markdown under the page title
# (see the gr.Markdown(DESCRIPTION) call in the Blocks layout below).
DESCRIPTION = """

The steps are the following:



- Paste a read-access token from hf.co/settings/tokens. Read access is enough given that we will open a PR against the source repo.

- Input a model id from the Hub

- Click "Submit"

- That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR 🔥



⚠️ For now only `pytorch_model.bin` files are supported but we'll extend in the future.

"""
|
|
|
# Page title, reused both as the browser tab title and the page heading.
title="Convert any model to Safetensors and open a PR"
# NOTE(review): `allow_flagging` is a gr.Interface option; nothing in the
# gr.Blocks app below reads this name — looks like leftover from an earlier
# Interface-based version. Confirm before removing.
allow_flagging="never"
|
|
|
def token_text(visible=False):
    """Build the single-line HF-token text input.

    The field is hidden by default and revealed when *visible* is True
    (driven by the "Private model" checkbox in the layout below).
    """
    field = gr.Text(max_lines=1, label="your_hf_token", visible=visible)
    return field
|
|
|
def run(a, b, c):
    """Echo the three submitted values as one comma-separated string.

    Placeholder handler for the Submit button: it simply renders the
    model id, privacy flag, and token back into the output Markdown.
    """
    return ", ".join(str(value) for value in (a, b, c))
|
|
|
# Build the UI: a two-column layout with the input form on the left and the
# Markdown result area on the right.
with gr.Blocks(title=title) as demo:
    # Fix: the original bound `description` twice in a row, discarding the
    # first Markdown component's reference; give the heading its own name.
    heading = gr.Markdown(f"""# {title}""")
    description = gr.Markdown(DESCRIPTION)

    # Unused context aliases (`as r`, `as c`, `as d`) removed — the original
    # also reused `c` for both a Column and a Row, shadowing the first.
    with gr.Row():
        with gr.Column():
            model_id = gr.Text(max_lines=1, label="model_id")
            is_private = gr.Checkbox(label="Private model")
            token = token_text()
            with gr.Row():
                clean = gr.ClearButton()
                submit = gr.Button("Submit", variant="primary")

        with gr.Column():
            output = gr.Markdown()

    # Reveal the token field only while "Private model" is checked: the
    # handler returns a fresh gr.Text whose `visible` flag mirrors the
    # checkbox state, and gradio applies it as an update to `token`.
    is_private.change(lambda s: token_text(s), inputs=is_private, outputs=token)
    # One conversion at a time: concurrency_limit=1 serializes submissions.
    submit.click(run, inputs=[model_id, is_private, token], outputs=output, concurrency_limit=1)

# Queue requests (at most 10 waiting) and start the server with the API
# endpoint documentation page enabled.
demo.queue(max_size=10).launch(show_api=True)
|
|