echarlaix HF staff committed on
Commit
d9f713b
1 Parent(s): 4b432e8

Add openvino export

Browse files
Files changed (4) hide show
  1. app.py +132 -0
  2. export.py +163 -0
  3. header.png +0 -0
  4. requirements.txt +2 -1
app.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import csv
import os
from datetime import datetime
from typing import Optional, Union

import gradio as gr
from huggingface_hub import HfApi, Repository

from export import convert


# Dataset repo used to log every successful export (one CSV row per export).
DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
DATA_FILENAME = "data.csv"
# Path of the CSV inside the dataset repo; the "openvino" folder groups rows per exporter.
DATA_FILE = os.path.join("openvino", DATA_FILENAME)
# Write token for the logging dataset; when unset, logging is silently disabled.
HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")
DATA_DIR = "exporters_data"

# Clone the logging dataset locally only when a write token is configured.
repo = None
if HF_TOKEN:
    repo = Repository(local_dir=DATA_DIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
19
+
20
+
21
def export(token: str, model_id: str, task: str) -> str:
    """Export *model_id* to OpenVINO and open a PR on its repo using *token*.

    Returns a markdown string shown in the UI: either a success message with
    the PR URL, or an error description. Never raises.
    """
    # Guard against empty (or None) inputs from the UI text boxes.
    if not token or not model_id:
        return """
### Invalid input 🐞

Please fill a token and model name.
"""
    try:
        api = HfApi(token=token)

        # convert() returns "0" on success, otherwise a markdown error message.
        error, commit_info = convert(api=api, model_id=model_id, task=task, force=False)
        if error != "0":
            return error

        print("[commit_info]", commit_info)

        # Log the export in a private dataset (no-op when HF_WRITE_TOKEN is unset).
        if repo is not None:
            repo.git_pull(rebase=True)
            # newline="" per the csv module docs so rows aren't double-spaced on Windows.
            with open(os.path.join(DATA_DIR, DATA_FILE), "a", newline="") as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=["model_id", "pr_url", "time"])
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)

        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
    except Exception as e:
        # Surface any failure back to the UI rather than crashing the Space.
        return f"#### Error: {e}"
54
+
55
+
56
+ TTILE_IMAGE = """
57
+ <div
58
+ style="
59
+ display: block;
60
+ margin-left: auto;
61
+ margin-right: auto;
62
+ width: 50%;
63
+ "
64
+ >
65
+ <img src="https://huggingface.co/spaces/echarlaix/openvino-export/resolve/main/header.png"/>
66
+ </div>
67
+ """
68
+
69
+ TITLE = """
70
+ <div
71
+ style="
72
+ display: inline-flex;
73
+ align-items: center;
74
+ text-align: center;
75
+ max-width: 1400px;
76
+ gap: 0.8rem;
77
+ font-size: 2.2rem;
78
+ "
79
+ >
80
+ <h1 style="font-weight: 900; margin-bottom: 10px; margin-top: 10px;">
81
+ Export your Transformers and Diffusers model to OpenVINO with 🤗 Optimum Intel (experimental)
82
+ </h1>
83
+ </div>
84
+ """
85
+
86
+ DESCRIPTION = """
87
+ This Space allows you to automatically export to the OpenVINO format various 🤗 Transformers and Diffusers PyTorch models hosted on the Hugging Face Hub.
88
+
89
+ Once exported, you will be able to load the resulting model using the [🤗 Optimum Intel](https://huggingface.co/docs/optimum/intel/inference).
90
+
91
+ To export your model, the steps are as following:
92
+ - Paste a read-access token from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens). Read access is enough given that we will open a PR against the source repo.
93
+ - Input a model id from the Hub (for example: [distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english))
94
+ - Click "Export"
95
+ - That’s it! You’ll get feedback if it works or not, and if it worked, you’ll get the URL of the opened PR 🔥
96
+ """
97
+
98
# Assemble the gradio interface: header, description column, and input column
# wired to export().
with gr.Blocks() as demo:
    gr.HTML(TTILE_IMAGE)
    gr.HTML(TITLE)

    with gr.Row():
        with gr.Column(scale=50):
            gr.Markdown(DESCRIPTION)

        with gr.Column(scale=50):
            token_box = gr.Textbox(
                max_lines=1,
                label="Hugging Face token",
            )
            model_box = gr.Textbox(
                max_lines=1,
                label="Model name",
                placeholder="distilbert-base-uncased-finetuned-sst-2-english",
            )
            task_box = gr.Textbox(
                value="auto",
                max_lines=1,
                label='Task (can be left to "auto", will be automatically inferred)',
            )

            export_button = gr.Button("Export")
            result_output = gr.Markdown(label="Output")

            export_button.click(
                fn=export,
                inputs=[token_box, model_box, task_box],
                outputs=result_output,
            )


demo.launch()
export.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import os
3
+ import shutil
4
+ from pathlib import Path
5
+ from tempfile import TemporaryDirectory
6
+ from typing import List, Optional, Tuple
7
+ import torch
8
+
9
+ from huggingface_hub import (
10
+ CommitOperationAdd,
11
+ HfApi,
12
+ get_repo_discussions,
13
+ hf_hub_download,
14
+ )
15
+ from huggingface_hub.file_download import repo_folder_name
16
+ from optimum.exporters.onnx import validate_model_outputs
17
+ from optimum.exporters.tasks import TasksManager
18
+ from transformers import AutoConfig, AutoTokenizer, is_torch_available
19
+ from optimum.intel.openvino import (
20
+ _HEAD_TO_AUTOMODELS,
21
+ _TASK_ALIASES,
22
+ OVModelForAudioClassification,
23
+ OVModelForCausalLM,
24
+ OVModelForFeatureExtraction,
25
+ OVModelForImageClassification,
26
+ OVModelForMaskedLM,
27
+ OVModelForQuestionAnswering,
28
+ OVModelForSeq2SeqLM,
29
+ OVModelForSequenceClassification,
30
+ OVModelForTokenClassification,
31
+ OVStableDiffusionPipeline,
32
+ )
33
+
34
+
35
# Public URL of this Space; embedded in PR descriptions so repo owners can trace the bot.
SPACES_URL = "https://huggingface.co/spaces/echarlaix/openvino-export"
36
+
37
+
38
def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
    """Return the first open PR on *model_id* whose title equals *pr_title*.

    Returns None when no such PR exists or when the repo's discussions
    cannot be listed (e.g. private/gated repo, discussions disabled).
    """
    try:
        discussions = api.get_repo_discussions(repo_id=model_id)
    except Exception:
        # Best-effort lookup: treat any listing failure as "no previous PR".
        return None
    for discussion in discussions:
        if (
            discussion.status == "open"
            and discussion.is_pull_request
            and discussion.title == pr_title
        ):
            return discussion
    return None
50
+
51
+
52
def convert_openvino(model_id: str, task: str, folder: str) -> List:
    """Export *model_id* to OpenVINO into *folder* and return commit operations.

    Loads the OVModel class matching *task*, exports and saves the model, then
    sanity-checks the exported outputs against the original PyTorch model on
    dummy inputs (skipped for stable-diffusion pipelines). Finally collects a
    CommitOperationAdd for every produced OpenVINO file.

    Raises:
        ValueError: if *task* is unsupported, or the exported model's outputs
            diverge from the original model's (atol=1e-3).
    """
    task = _TASK_ALIASES.get(task, task)
    if task not in _HEAD_TO_AUTOMODELS:
        raise ValueError(f"The task '{task}' is not supported, only {_HEAD_TO_AUTOMODELS.keys()} tasks are supported")
    # NOTE(review): _HEAD_TO_AUTOMODELS maps the task to a class *name* string;
    # eval() resolves it to one of the OVModel* classes imported above. The dict
    # values are library-controlled, not user input, but a globals() lookup
    # would be safer if the values are guaranteed to be bare names — confirm.
    auto_model_class = eval(_HEAD_TO_AUTOMODELS[task])
    ov_model = auto_model_class.from_pretrained(model_id, export=True)
    ov_model.save_pretrained(folder)

    if not isinstance(ov_model, OVStableDiffusionPipeline):
        # Validate the export: run both models on dummy inputs and compare.
        model = TasksManager.get_model_from_task(task, model_id)
        onnx_config_class = TasksManager.get_exporter_config_constructor(
            exporter="onnx",
            model=model,
            task=task,
            model_name=model_id,
            model_type=model.config.model_type.replace("_", "-"),
        )
        onnx_config = onnx_config_class(model.config)
        inputs = onnx_config.generate_dummy_inputs(framework="pt")

        if isinstance(ov_model, (OVModelForCausalLM, OVModelForSeq2SeqLM)):
            ov_outputs = ov_model.generate(**inputs)
            outputs = model.generate(**inputs)
        else:
            ov_outputs = ov_model(**inputs)
            outputs = model(**inputs)

        if isinstance(outputs, torch.Tensor):
            # generate() returns raw tensors; wrap them so the loop below is uniform.
            outputs = {"logits": outputs}
            ov_outputs = {"logits": ov_outputs}
        for output_name in outputs:
            if not torch.allclose(outputs[output_name], ov_outputs[output_name], atol=1e-3):
                raise ValueError(
                    "The exported model does not have the same outputs as the original model. Export interrupted."
                )

    # Top-level OpenVINO artifacts (e.g. openvino_model.xml / .bin).
    file_names = {elem for elem in os.listdir(folder) if os.path.isfile(os.path.join(folder, elem))}

    operations = [
        CommitOperationAdd(
            path_in_repo=file_name, path_or_fileobj=os.path.join(folder, file_name)
        )
        for file_name in file_names if "openvino" in file_name
    ]

    # Stable-diffusion pipelines store their components in sub-directories.
    dir_names = set(os.listdir(folder)) - file_names

    for dir_name in dir_names.intersection({"vae_encoder", "vae_decoder", "text_encoder", "unet"}):
        operations += [
            CommitOperationAdd(
                path_in_repo=os.path.join(dir_name, file_name),
                path_or_fileobj=os.path.join(folder, dir_name, file_name),
            )
            for file_name in os.listdir(os.path.join(folder, dir_name)) if "openvino" in file_name
        ]

    return operations
111
+
112
+
113
def convert(
    api: "HfApi",
    model_id: str,
    task: str,
    force: bool = False,
) -> Tuple[str, Optional["CommitInfo"]]:
    """Export *model_id* to OpenVINO and open a PR adding the exported files.

    Returns ("0", commit_info) on success — the "0" sentinel is what app.py
    checks for. If the task cannot be inferred, returns (error_message, None).
    Other failures raise (e.g. model already converted, or an existing open
    PR when *force* is False); the temporary export folder is always removed.
    """
    pr_title = "Adding OpenVINO file of this model"
    info = api.model_info(model_id)
    filenames = set(s.rfilename for s in info.siblings)

    # Credited in the PR description so repo owners know who requested the export.
    requesting_user = api.whoami()["name"]

    if task == "auto":
        try:
            task = TasksManager.infer_task_from_model(model_id)
        except Exception as e:
            return (
                f"### Error: {e}. Please pass explicitly the task as it could not be inferred.",
                None,
            )

    with TemporaryDirectory() as d:
        folder = os.path.join(d, repo_folder_name(repo_id=model_id, repo_type="models"))
        os.makedirs(folder)
        new_pr = None
        try:
            pr = previous_pr(api, model_id, pr_title)
            if "openvino_model.xml" in filenames and not force:
                raise Exception(f"Model {model_id} is already converted, skipping..")
            elif pr is not None and not force:
                url = f"https://huggingface.co/{model_id}/discussions/{pr.num}"
                new_pr = pr
                raise Exception(
                    f"Model {model_id} already has an open PR check out [{url}]({url})"
                )
            else:
                operations = convert_openvino(model_id, task, folder)

                commit_description = f"""
Beep boop I am the [OpenVINO exporter bot 🤖]({SPACES_URL}). On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), I would like to add to this repository the exported OpenVINO model.
"""
                new_pr = api.create_commit(
                    repo_id=model_id,
                    operations=operations,
                    commit_message=pr_title,
                    commit_description=commit_description,
                    create_pr=True,
                )
        finally:
            # TemporaryDirectory would remove this anyway, but clean up eagerly
            # so large export artifacts don't linger until the context exits.
            shutil.rmtree(folder)
    return "0", new_pr
header.png ADDED
requirements.txt CHANGED
@@ -1,3 +1,4 @@
1
  transformers
2
  diffusers
3
- git+https://github.com/huggingface/optimum-intel.git#egg=optimum[openvino]
 
 
1
  transformers
2
  diffusers
3
+ optimum==1.7.1
4
+ git+https://github.com/huggingface/optimum-intel.git#egg=optimum-intel[openvino]