Félix Marty committed on
Commit 025ff03
1 Parent(s): 9005b4d

update space

Files changed (3)
  1. app.py +1 -17
  2. onnx_export.py +2 -32
  3. requirements.txt +2 -2
app.py CHANGED
@@ -61,7 +61,7 @@ def onnx_export(token: str, model_id: str, task: str, opset: Union[int, str]) ->
 
         pr_revision = commit_info.pr_revision.replace("/", "%2F")
 
-        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url}). If you would like to use the exported model without waiting for the PR to be approved, head to "
+        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url}). If you would like to use the exported model without waiting for the PR to be approved, head to https://huggingface.co/{model_id}/tree/{pr_revision}"
     except Exception as e:
         return f"#### Error: {e}"
 
@@ -151,20 +151,4 @@ with gr.Blocks() as demo:
         outputs=output,
     )
 
-    """
-    demo = gr.Interface(
-        title="",
-        description=DESCRIPTION,
-        allow_flagging="never",
-        article="Check out the [🤗 Optimum repoository on GitHub](https://github.com/huggingface/optimum) as well!",
-        inputs=[
-            gr.Text(max_lines=1, label="Hugging Face token"),
-            gr.Text(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA"),
-            gr.Text(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
-        ],
-        outputs=[gr.Markdown(label="output")],
-        fn=onnx_export,
-    )
-    """
-
 demo.launch()
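
The commented-out gr.Interface block removed above had already been superseded by the gr.Blocks layout referenced in the hunk header. For illustration only, here is a minimal sketch of how such a Blocks UI could wire the same inputs to onnx_export; the component names, the button, and the stubbed function body are assumptions, not the space's actual code:

```python
import gradio as gr


def onnx_export(token: str, model_id: str, task: str, opset: str) -> str:
    # Stub standing in for the real onnx_export defined earlier in app.py.
    return "#### (stub) the export would run here"


# Hypothetical gr.Blocks wiring; the real app.py layout is not shown in full in this diff.
with gr.Blocks() as demo:
    token = gr.Text(max_lines=1, label="Hugging Face token")
    model_id = gr.Text(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA")
    task = gr.Text(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
    opset = gr.Text(value="", max_lines=1, label="ONNX opset (blank to use the default)")
    btn = gr.Button("Export to ONNX")
    output = gr.Markdown(label="output")
    # onnx_export returns a Markdown string that is rendered in the output component.
    btn.click(fn=onnx_export, inputs=[token, model_id, task, opset], outputs=output)

demo.launch()
```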
onnx_export.py CHANGED
@@ -1,4 +1,3 @@
-import argparse
 import os
 import shutil
 from tempfile import TemporaryDirectory
@@ -10,9 +9,8 @@ from huggingface_hub import (
 )
 from huggingface_hub.file_download import repo_folder_name
 from optimum.exporters.onnx import main_export
-from optimum.exporters.tasks import TasksManager
 
-SPACES_URL = "https://huggingface.co/spaces/optimum/exporters"
+SPACES_URL = "https://huggingface.co/spaces/onnx/export"
 
 
 def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
@@ -84,7 +82,7 @@ def convert(
     try:
         pr = previous_pr(api, model_id, pr_title)
         if "model.onnx" in filenames and not force:
-            raise Exception(f"Model {model_id} is already converted, skipping..")
+            raise Exception(f"Model {model_id} is already converted, skipping the export.")
         elif pr is not None and not force:
             url = f"https://huggingface.co/{model_id}/discussions/{pr.num}"
             new_pr = pr
@@ -111,31 +109,3 @@ The exported ONNX model can be then be consumed by various backends as TensorRT
     finally:
         shutil.rmtree(folder)
     return "0", new_pr
-
-
-if __name__ == "__main__":
-    DESCRIPTION = """
-    Simple utility tool to convert automatically a model on the hub to onnx format.
-    It is PyTorch exclusive for now.
-    It works by downloading the weights (PT), converting them locally, and uploading them back
-    as a PR on the hub.
-    """
-    parser = argparse.ArgumentParser(description=DESCRIPTION)
-    parser.add_argument(
-        "--model_id",
-        type=str,
-        help="The name of the model on the hub to convert. E.g. `gpt2` or `facebook/wav2vec2-base-960h`",
-    )
-    parser.add_argument(
-        "--task",
-        type=str,
-        help="The task the model is performing",
-    )
-    parser.add_argument(
-        "--force",
-        action="store_true",
-        help="Create the PR even if it already exists of if the model was already converted.",
-    )
-    args = parser.parse_args()
-    api = HfApi()
-    convert(api, args.model_id, task=args.task, force=args.force)
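
For reference, convert() delegates the actual export to main_export, which stays imported at the top of the file. Below is a minimal sketch of that call; the model name and output directory are placeholders rather than the space's exact invocation:

```python
from pathlib import Path
from tempfile import TemporaryDirectory

from optimum.exporters.onnx import main_export

# Placeholder model; the space takes this from user input instead.
model_id = "distilbert-base-uncased"

with TemporaryDirectory() as tmp:
    out_dir = Path(tmp) / "onnx"
    # task="auto" lets the exporter infer the task from the model config,
    # matching the "auto" default exposed in the space's UI.
    main_export(model_name_or_path=model_id, output=out_dir, task="auto")
    # The resulting files (e.g. model.onnx, config.json) are what convert()
    # uploads back to the Hub as a pull request.
    print(sorted(p.name for p in out_dir.iterdir()))
```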
requirements.txt CHANGED
@@ -1,4 +1,4 @@
 torch
 transformers
-git+https://github.com/huggingface/huggingface_hub@main
-git+https://github.com/huggingface/optimum.git#egg=optimum[onnxruntime]
+huggingface_hub
+optimum[onnxruntime,exporters]>=1.9.0
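
With the pinned optimum[onnxruntime,exporters] dependency, the exported weights can also be loaded straight from the unmerged PR branch that the success message links to. A hedged sketch follows, where the model id and the refs/pr/1 revision are placeholders to replace with the actual repository and PR number:

```python
from optimum.onnxruntime import ORTModelForSequenceClassification
from transformers import AutoTokenizer, pipeline

# Placeholders: use the repository the space exported and the PR revision it reported.
model_id = "textattack/distilbert-base-cased-CoLA"
pr_revision = "refs/pr/1"

# Load the ONNX export directly from the pull-request branch, without waiting for a merge.
model = ORTModelForSequenceClassification.from_pretrained(model_id, revision=pr_revision)
tokenizer = AutoTokenizer.from_pretrained(model_id)

classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)
print(classifier("The exported model still classifies sentences."))
```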