Felix Marty committed on
Commit
7567dc4
1 Parent(s): cb73b3c

add commit description

Browse files
Files changed (2) hide show
  1. app.py +2 -2
  2. onnx_export.py +15 -0
app.py CHANGED
@@ -84,8 +84,8 @@ with gr.Blocks() as demo:
84
  gr.Markdown(DESCRIPTION)
85
 
86
  with gr.Row():
87
- input_token = gr.Textbox(max_lines=1, label="Hugging Face token"),
88
- input_model = gr.Textbox(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA"),
89
  input_task = gr.Textbox(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
90
  output = gr.Markdown(label="Output")
91
 
 
84
  gr.Markdown(DESCRIPTION)
85
 
86
  with gr.Row():
87
+ input_token = gr.Textbox(max_lines=1, label="Hugging Face token")
88
+ input_model = gr.Textbox(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA")
89
  input_task = gr.Textbox(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
90
  output = gr.Markdown(label="Output")
91
 
onnx_export.py CHANGED
@@ -17,6 +17,8 @@ from typing import Optional, Tuple, List
17
  from huggingface_hub import CommitOperationAdd, HfApi, hf_hub_download, get_repo_discussions
18
  from huggingface_hub.file_download import repo_folder_name
19
 
 
 
20
  def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
21
  try:
22
  discussions = api.get_repo_discussions(repo_id=model_id)
@@ -90,6 +92,8 @@ def convert(api: "HfApi", model_id: str, task: str, force: bool = False) -> Tupl
90
  info = api.model_info(model_id)
91
  filenames = set(s.rfilename for s in info.siblings)
92
 
 
 
93
  if task == "auto":
94
  try:
95
  task = TasksManager.infer_task_from_model(model_id)
@@ -111,10 +115,21 @@ def convert(api: "HfApi", model_id: str, task: str, force: bool = False) -> Tupl
111
  else:
112
  operations = convert_onnx(model_id, task, folder)
113
 
 
 
 
 
 
 
 
 
 
 
114
  new_pr = api.create_commit(
115
  repo_id=model_id,
116
  operations=operations,
117
  commit_message=pr_title,
 
118
  create_pr=True,
119
  )
120
  finally:
 
17
  from huggingface_hub import CommitOperationAdd, HfApi, hf_hub_download, get_repo_discussions
18
  from huggingface_hub.file_download import repo_folder_name
19
 
20
+ SPACES_URL = "https://huggingface.co/spaces/optimum/exporters"
21
+
22
  def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
23
  try:
24
  discussions = api.get_repo_discussions(repo_id=model_id)
 
92
  info = api.model_info(model_id)
93
  filenames = set(s.rfilename for s in info.siblings)
94
 
95
+ requesting_user = api.whoami()["name"]
96
+
97
  if task == "auto":
98
  try:
99
  task = TasksManager.infer_task_from_model(model_id)
 
115
  else:
116
  operations = convert_onnx(model_id, task, folder)
117
 
118
+ commit_description = f"""
119
+ Beep boop I am the [ONNX export bot 🤖🏎️]({SPACES_URL}). On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), I would like to
120
+ add to this repository the model converted to ONNX.
121
+
122
+ What is ONNX? It stands for "Open Neural Network Exchange", and is the most commonly used open standard for machine learning interoperability.
123
+ You can find out more at [onnx.ai](https://onnx.ai/)!
124
+
125
+ The exported ONNX model can be then be consumed by various backends as TensorRT or TVM, or simply be used in a few lines
126
+ with 🤗 Optimum through ONNX Runtime, check out how [here](https://huggingface.co/docs/optimum/main/en/onnxruntime/usage_guides/models)!
127
+ """
128
  new_pr = api.create_commit(
129
  repo_id=model_id,
130
  operations=operations,
131
  commit_message=pr_title,
132
+ commit_description=commit_description, # TODO
133
  create_pr=True,
134
  )
135
  finally: