Spaces:
onnx
/
Running on CPU Upgrade

Felix Marty committed on
Commit
bdeb572
β€’
1 Parent(s): c77e121

add log, better readme

Browse files
Files changed (2) hide show
  1. app.py +20 -15
  2. clean_hf_onnx.png +0 -0
app.py CHANGED
@@ -8,9 +8,7 @@ from onnx_export import convert
8
  from huggingface_hub import HfApi, Repository
9
 
10
 
11
- # TODO: save stats about the Space?
12
- """
13
- DATASET_REPO_URL = "https://huggingface.co/datasets/safetensors/conversions"
14
  DATA_FILENAME = "data.csv"
15
  DATA_FILE = os.path.join("data", DATA_FILENAME)
16
 
@@ -19,7 +17,6 @@ HF_TOKEN = os.environ.get("HF_TOKEN")
19
  repo: Optional[Repository] = None
20
  if HF_TOKEN:
21
  repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, token=HF_TOKEN)
22
- """
23
 
24
  def onnx_export(token: str, model_id: str, task: str) -> str:
25
  if token == "" or model_id == "":
@@ -37,11 +34,7 @@ def onnx_export(token: str, model_id: str, task: str) -> str:
37
 
38
  print("[commit_info]", commit_info)
39
 
40
- return f"### Success πŸ”₯ Yay! This model was successfully converted and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
41
- except Exception as e:
42
- return f"### Error: {e}"
43
- """
44
- # save in a private dataset:
45
  if repo is not None:
46
  repo.git_pull(rebase=True)
47
  with open(DATA_FILE, "a") as csvfile:
@@ -59,27 +52,39 @@ def onnx_export(token: str, model_id: str, task: str) -> str:
59
  print("[dataset]", commit_url)
60
 
61
  return f"### Success πŸ”₯ Yay! This model was successfully converted and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
62
- """
63
-
64
 
65
  DESCRIPTION = """
 
 
 
 
 
 
 
 
 
 
66
  The steps are the following:
67
  - Paste a read-access token from hf.co/settings/tokens. Read access is enough given that we will open a PR against the source repo.
68
- - Input a model id from the Hub
69
  - If necessary, input the task for this model.
70
  - Click "Convert to ONNX"
71
  - That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR!
 
 
72
  """
73
 
74
  demo = gr.Interface(
75
- title="POC to convert any PyTorch model to ONNX",
76
  description=DESCRIPTION,
77
  allow_flagging="never",
78
  article="Check out the [Optimum repo on GitHub](https://github.com/huggingface/optimum)",
79
  inputs=[
80
  gr.Text(max_lines=1, label="Hugging Face token"),
81
- gr.Text(max_lines=1, label="model_id"),
82
- gr.Text(value="auto", max_lines=1, label="task (can be left blank, will auto-infer)")
83
  ],
84
  outputs=[gr.Markdown(label="output")],
85
  fn=onnx_export,
 
8
  from huggingface_hub import HfApi, Repository
9
 
10
 
11
+ DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
 
 
12
  DATA_FILENAME = "data.csv"
13
  DATA_FILE = os.path.join("data", DATA_FILENAME)
14
 
 
17
  repo: Optional[Repository] = None
18
  if HF_TOKEN:
19
  repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, token=HF_TOKEN)
 
20
 
21
  def onnx_export(token: str, model_id: str, task: str) -> str:
22
  if token == "" or model_id == "":
 
34
 
35
  print("[commit_info]", commit_info)
36
 
37
+ # save in a private dataset
 
 
 
 
38
  if repo is not None:
39
  repo.git_pull(rebase=True)
40
  with open(DATA_FILE, "a") as csvfile:
 
52
  print("[dataset]", commit_url)
53
 
54
  return f"### Success πŸ”₯ Yay! This model was successfully converted and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
55
+ except Exception as e:
56
+ return f"### Error: {e}"
57
 
58
  DESCRIPTION = """
59
+ <p align="center">
60
+ <img src="clean_hf_onnx.png"/>
61
+ </p>
62
+
63
+ This Space allows you to automatically convert transformers models hosted on the Hugging Face Hub to ONNX. It opens a PR on the target model, and it is up to the owner of the original model
64
+ to merge the PR to allow people to leverage the ONNX standard to share and use the model on a wide range of devices!
65
+
66
+ Once converted, the model can for example be used in the [πŸ€— Optimum](https://huggingface.co/docs/optimum/) library following closely the transformers API.
67
+ Check out [this guide](https://huggingface.co/docs/optimum/main/en/onnxruntime/usage_guides/models) to see how!
68
+
69
  The steps are the following:
70
  - Paste a read-access token from hf.co/settings/tokens. Read access is enough given that we will open a PR against the source repo.
71
+ - Input a model id from the Hub (for example: textattack/distilbert-base-cased-CoLA)
72
  - If necessary, input the task for this model.
73
  - Click "Convert to ONNX"
74
  - That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR!
75
+
76
+ Note: in case the model to convert is larger than 2 GB, it will be saved in a subfolder called `onnx/`. To load it from Optimum, the argument `subfolder="onnx"` should be provided.
77
  """
78
 
79
  demo = gr.Interface(
80
+ title="Convert any PyTorch model to ONNX with πŸ€— Optimum Exporters 🏎️",
81
  description=DESCRIPTION,
82
  allow_flagging="never",
83
  article="Check out the [Optimum repo on GitHub](https://github.com/huggingface/optimum)",
84
  inputs=[
85
  gr.Text(max_lines=1, label="Hugging Face token"),
86
+ gr.Text(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA"),
87
+ gr.Text(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
88
  ],
89
  outputs=[gr.Markdown(label="output")],
90
  fn=onnx_export,
clean_hf_onnx.png ADDED