# export / app.py
# Author: echarlaix (HF staff) — commit cb5d8eb, "add gradio authentication token".
# (The original capture also carried Hugging Face file-viewer chrome:
# "raw / history blame / 3.51 kB" — non-code residue, preserved here as a comment.)
import csv
import os
from datetime import datetime
from typing import Optional, Union
import gradio as gr
from huggingface_hub import HfApi, Repository
from export import convert
from gradio_huggingfacehub_search import HuggingfaceHubSearch
# Private dataset used to log every successful export (one CSV row per PR opened).
DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
DATA_FILENAME = "data.csv"
# Path of the log file inside the dataset repo, kept under an "openvino" folder.
DATA_FILE = os.path.join("openvino", DATA_FILENAME)
# Write token for the logging dataset; when unset, logging is silently disabled
# (repo stays None and export() skips the logging branch).
HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")
DATA_DIR = "exporters_data"
repo = None
if HF_TOKEN:
    # NOTE: clones the dataset repo at import time — a module-level side effect.
    repo = Repository(local_dir=DATA_DIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
def export(model_id: str, task: str, oauth_token: Optional[gr.OAuthToken] = None) -> str:
    """Export a Hub model to the OpenVINO format and open a PR on the source repo.

    Args:
        model_id: Hub model ID selected in the UI.
        task: Task name; "auto" lets the exporter infer it.
        oauth_token: OAuth token injected by gradio's login flow. Gradio passes
            ``None`` (not an ``OAuthToken``) when the visitor is not logged in.

    Returns:
        A markdown status string: the success message with the PR link, or an
        error description.

    Raises:
        ValueError: if the visitor is not logged in.
    """
    # Guard the object itself before touching .token — a logged-out visitor
    # yields oauth_token=None, and the original attribute access would raise
    # AttributeError instead of the intended ValueError.
    if oauth_token is None or oauth_token.token is None:
        raise ValueError("You must be logged in to use this space")
    token = oauth_token.token
    if model_id == "" or token == "":
        return """
        ### Invalid input 🐞
        Please fill a token and model name.
        """
    try:
        api = HfApi(token=token)
        # convert() reports success via the string sentinel "0"; anything else
        # is an error message to surface directly in the UI.
        error, commit_info = convert(api=api, model_id=model_id, task=task, force=False)
        if error != "0":
            return error
        print("[commit_info]", commit_info)

        # Log the export in the private dataset (only when HF_WRITE_TOKEN was set).
        if repo is not None:
            repo.git_pull(rebase=True)
            # newline="" is required by the csv module so the writer controls
            # line endings itself (avoids blank rows on some platforms).
            with open(
                os.path.join(DATA_DIR, DATA_FILE), "a", newline="", encoding="utf-8"
            ) as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=["model_id", "pr_url", "time"])
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)

        return f"#### Success πŸ”₯ Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
    except Exception as e:
        # Surface any failure to the UI instead of crashing the space.
        return f"#### Error: {e}"
TTILE_IMAGE = """
<div
style="
display: block;
margin-left: auto;
margin-right: auto;
width: 50%;
"
>
<img src="https://huggingface.co/spaces/echarlaix/openvino-export/resolve/main/header.png"/>
</div>
"""
TITLE = """
<div
style="
display: inline-flex;
align-items: center;
text-align: center;
max-width: 1400px;
gap: 0.8rem;
font-size: 2.2rem;
"
>
<h1 style="text-align:center; font-weight: 1200">
Export your model to OpenVINO
</h1>
</div>
"""
DESCRIPTION = """
This Space uses [Optimum Intel](https://huggingface.co/docs/optimum/intel/inference) to automatically export your model to the OpenVINO format.
After the model conversion, we will open a PR against the source repo to add the resulting model.
To export your model you need:
- A Model ID from the Hub
That's it ! πŸ”₯
"""
# Hub model picker backed by the hub-search component.
model_id = HuggingfaceHubSearch(
    search_type="model",
    label="Hub Model ID",
    placeholder="Search for model id on the hub",
)

# Free-form task override; "auto" lets the exporter infer the task.
task = gr.Textbox(
    label="Task : can be left to auto, will be automatically inferred",
    value="auto",
)

# Markdown pane that receives export()'s status string.
status_output = gr.Markdown(label="output")

# Wire the inputs and the status pane to export(); api_name=False keeps the
# endpoint out of the public API listing.
interface = gr.Interface(
    fn=export,
    inputs=[model_id, task],
    outputs=[status_output],
    title=TITLE,
    description=DESCRIPTION,
    api_name=False,
)
# Top-level UI: a login gate (OAuth button) followed by the export form,
# all rendered inside a single Blocks app.
with gr.Blocks() as demo:
    gr.Markdown("You must be logged in to use this space")
    gr.LoginButton(min_width=250)
    # Render the pre-built Interface inside this Blocks context.
    interface.render()

demo.launch()