Spaces: onnx

Félix Marty committed
Commit 9005b4d • Parent(s): 12e01c3

Files changed (2):
  1. app.py (+4, -2)
  2. onnx_export.py (+7, -61)
app.py CHANGED
@@ -59,7 +59,9 @@ def onnx_export(token: str, model_id: str, task: str, opset: Union[int, str]) ->
         commit_url = repo.push_to_hub()
         print("[dataset]", commit_url)
 
-        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
+        pr_revision = commit_info.pr_revision.replace("/", "%2F")
+
+        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url}). If you would like to use the exported model without waiting for the PR to be approved, head to "
     except Exception as e:
         return f"#### Error: {e}"
 
@@ -89,7 +91,7 @@ TITLE = """
         "
     >
     <h1 style="font-weight: 900; margin-bottom: 10px; margin-top: 10px;">
-        Export transformers model to ONNX with 🤗 Optimum exporters 🏎️ (Beta)
+        Export transformers model to ONNX with 🤗 Optimum exporters 🏎️
     </h1>
     </div>
 """
onnx_export.py CHANGED
@@ -1,20 +1,16 @@
 import argparse
 import os
 import shutil
-from pathlib import Path
 from tempfile import TemporaryDirectory
 from typing import List, Optional, Tuple
 
 from huggingface_hub import (
     CommitOperationAdd,
     HfApi,
-    get_repo_discussions,
-    hf_hub_download,
 )
 from huggingface_hub.file_download import repo_folder_name
-from optimum.exporters.onnx import OnnxConfigWithPast, export, validate_model_outputs
+from optimum.exporters.onnx import main_export
 from optimum.exporters.tasks import TasksManager
-from transformers import AutoConfig, AutoTokenizer, is_torch_available
 
 SPACES_URL = "https://huggingface.co/spaces/optimum/exporters"
 
@@ -33,54 +29,13 @@ def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
     return discussion
 
 
-def convert_onnx(model_id: str, task: str, folder: str, opset: int) -> List:
-
-    # Allocate the model
-    model = TasksManager.get_model_from_task(task, model_id, framework="pt")
-    model_type = model.config.model_type.replace("_", "-")
-    model_name = getattr(model, "name", None)
-
-    onnx_config_constructor = TasksManager.get_exporter_config_constructor(
-        exporter="onnx",
-        model=model,
+def export_and_git_add(model_id: str, task: str, folder: str, opset: int) -> List:
+    main_export(
+        model_name_or_path=model_id,
+        output=folder,
         task=task,
-        model_name=model_name,
-        model_type=model_type,
-    )
-    onnx_config = onnx_config_constructor(model.config)
-
-    needs_pad_token_id = (
-        isinstance(onnx_config, OnnxConfigWithPast)
-        and getattr(model.config, "pad_token_id", None) is None
-        and task in ["sequence_classification"]
+        opset=opset,
     )
-    if needs_pad_token_id:
-        # if args.pad_token_id is not None:
-        #     model.config.pad_token_id = args.pad_token_id
-        try:
-            tok = AutoTokenizer.from_pretrained(model_id)
-            model.config.pad_token_id = tok.pad_token_id
-        except Exception:
-            raise ValueError(
-                "Could not infer the pad token id, which is needed in this case, please provide it with the --pad_token_id argument"
-            )
-
-    # Ensure the requested opset is sufficient
-    if opset == None:
-        opset = onnx_config.DEFAULT_ONNX_OPSET
-
-    output = Path(folder).joinpath("model.onnx")
-    onnx_inputs, onnx_outputs = export(model, onnx_config, output, opset)
-
-    atol = onnx_config.ATOL_FOR_VALIDATION
-    if isinstance(atol, dict):
-        atol = atol[task.replace("-with-past", "")]
-
-    try:
-        validate_model_outputs(onnx_config, model, output, onnx_outputs, atol)
-        print(f"All good, model saved at: {output}")
-    except ValueError:
-        print(f"An error occured, but the model was saved at: {output.as_posix()}")
 
     n_files = len(
         [
@@ -122,15 +77,6 @@ def convert(
 
     requesting_user = api.whoami()["name"]
 
-    if task == "auto":
-        try:
-            task = TasksManager.infer_task_from_model(model_id)
-        except Exception as e:
-            return (
-                f"### Error: {e}. Please pass explicitely the task as it could not be infered.",
-                None,
-            )
-
     with TemporaryDirectory() as d:
         folder = os.path.join(d, repo_folder_name(repo_id=model_id, repo_type="models"))
         os.makedirs(folder)
@@ -146,7 +92,7 @@ def convert(
                 f"Model {model_id} already has an open PR check out [{url}]({url})"
             )
         else:
-            operations = convert_onnx(model_id, task, folder, opset)
+            operations = export_and_git_add(model_id, task, folder, opset)
 
     commit_description = f"""
 Beep boop I am the [ONNX export bot 🤖🏎️]({SPACES_URL}). On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), I would like to add to this repository the model converted to ONNX.
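The refactor above replaces the hand-rolled export pipeline (building an ONNX config through `TasksManager`, patching in a pad token, picking a default opset, calling `export`, then `validate_model_outputs`) with a single call to `optimum.exporters.onnx.main_export`, which performs those steps internally. It also infers the task itself, which is why the explicit `task == "auto"` inference block in `convert` could be dropped. A minimal standalone sketch of the new code path; the model ID is illustrative and the keyword defaults shown are assumptions based on optimum's documented API:

    from tempfile import TemporaryDirectory

    from optimum.exporters.onnx import main_export

    with TemporaryDirectory() as folder:
        main_export(
            model_name_or_path="distilbert-base-uncased",  # illustrative model ID
            output=folder,   # directory that receives model.onnx and companion files
            task="auto",     # let optimum infer the task from the Hub metadata
            opset=None,      # fall back to the model type's default ONNX opset
        )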