mav23 committed
Commit 1e7dfc2 · verified · 1 Parent(s): aa34e36

Create app.py

Files changed (1)
app.py +160 -0
app.py ADDED
@@ -0,0 +1,160 @@
import csv
import os
from datetime import datetime
from typing import Optional, Union

import gradio as gr
from huggingface_hub import HfApi, Repository

from onnx_export import convert

from apscheduler.schedulers.background import BackgroundScheduler

DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
DATA_FILENAME = "data.csv"
DATA_FILE = os.path.join("data", DATA_FILENAME)

HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")

DATADIR = "exporters_data"

repo: Optional[Repository] = None
# if HF_TOKEN:
#     repo = Repository(local_dir=DATADIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
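# NOTE: logging exports to the private dataset is only active when the Repository clone
# above is re-enabled with a valid HF_WRITE_TOKEN; otherwise `repo` stays None and
# exports still work but are not recorded (see the `if repo is not None` branch below).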


def onnx_export(token: str, model_id: str, task: str, opset: Union[int, str]) -> str:
    if token == "" or model_id == "":
        return """
        ### Invalid input 🐞

        Please fill in a token and a model name.
        """
    try:
        if opset == "":
            opset = None
        else:
            opset = int(opset)

        api = HfApi(token=token)

        error, commit_info = convert(api=api, model_id=model_id, task=task, opset=opset)
        if error != "0":
            return error

        print("[commit_info]", commit_info)

        # save in a private dataset
        if repo is not None:
            repo.git_pull(rebase=True)
            with open(os.path.join(DATADIR, DATA_FILE), "a") as csvfile:
                writer = csv.DictWriter(
                    csvfile, fieldnames=["model_id", "pr_url", "time"]
                )
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)

        pr_revision = commit_info.pr_revision.replace("/", "%2F")

        return f"#### This model was successfully exported and a PR was opened using your token, here: [{commit_info.pr_url}]({commit_info.pr_url}). If you would like to use the exported model without waiting for the PR to be approved, head to https://huggingface.co/{model_id}/tree/{pr_revision}"
    except Exception as e:
        return f"#### Error: {e}"


TITLE_IMAGE = """
<div
    style="
        display: block;
        margin-left: auto;
        margin-right: auto;
        width: 50%;
    "
>
    <img src="https://i.ibb.co/m5VnjSsQ/Blue-and-White-Illustrative-Profile-Twitter-Header.png"/>
</div>
"""

TITLE = """
<div
    style="
        display: inline-flex;
        align-items: center;
        text-align: center;
        max-width: 1400px;
        gap: 0.8rem;
        font-size: 2.2rem;
    "
>
    <h1 style="font-weight: 900; margin-bottom: 10px; margin-top: 10px;">
        Export transformers models to ONNX with HF Optimum exporters.
    </h1>
</div>
"""

# for some reason https://huggingface.co/settings/tokens is not showing as a link by default?
DESCRIPTION = """
This Space enables automatic export of Hugging Face transformers PyTorch models to [ONNX](https://onnx.ai/). It creates a pull request on the target model repository, allowing model owners to review and merge the ONNX export, making their models accessible across a wide range of devices and platforms.

Once exported, the model can be seamlessly integrated with [HF Optimum](https://huggingface.co/docs/optimum/), maintaining compatibility with the transformers API. For detailed implementation, check out [this comprehensive guide](https://huggingface.co/docs/optimum/main/en/onnxruntime/usage_guides/models).

Quick Start Guide:
1. Obtain a read-access token from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) (read access is sufficient for PR creation)
2. Enter a model ID from the Hub (e.g., [textattack/distilbert-base-cased-CoLA](https://huggingface.co/textattack/distilbert-base-cased-CoLA))
3. Click "Export to ONNX"
4. Done! You'll receive feedback on the export status and, if successful, the URL of the created pull request

Important Note: For models exceeding 2 GB, the ONNX export will be saved in an `onnx/` subfolder. When loading such models with Optimum, remember to include the `subfolder="onnx"` parameter."""
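# A minimal loading sketch for the note above, assuming the export PR has been merged
# (or its revision is passed via `revision=`). "your-org/your-model" is a placeholder,
# and `subfolder="onnx"` is only needed when the export was saved under `onnx/`:
#
#     from optimum.onnxruntime import ORTModelForSequenceClassification
#
#     ort_model = ORTModelForSequenceClassification.from_pretrained(
#         "your-org/your-model", subfolder="onnx"
#     )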

with gr.Blocks() as demo:
    gr.HTML(TITLE_IMAGE)
    gr.HTML(TITLE)

    with gr.Row():
        with gr.Column(scale=50):
            gr.Markdown(DESCRIPTION)

        with gr.Column(scale=50):
            input_token = gr.Textbox(
                max_lines=1,
                label="Hugging Face token",
            )
            input_model = gr.Textbox(
                max_lines=1,
                label="Model name",
                placeholder="textattack/distilbert-base-cased-CoLA",
            )
            input_task = gr.Textbox(
                value="auto",
                max_lines=1,
                label='Task (can be left to "auto", will be automatically inferred)',
            )
            onnx_opset = gr.Textbox(
                placeholder="for example 14, can be left blank",
                max_lines=1,
                label="ONNX opset (optional, can be left blank)",
            )

            btn = gr.Button("Export to ONNX")
            output = gr.Markdown(label="Output")

            btn.click(
                fn=onnx_export,
                inputs=[input_token, input_model, input_task, onnx_opset],
                outputs=output,
            )


def restart_space():
    HfApi().restart_space(repo_id="onnx/export", token=HF_TOKEN, factory_reboot=True)

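# Restart the Space every 6 hours (21600 seconds); factory_reboot=True asks the Hub to
# rebuild the Space from scratch rather than only restarting the running process.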
scheduler = BackgroundScheduler()
scheduler.add_job(restart_space, "interval", seconds=21600)
scheduler.start()

demo.launch()