IlyasMoutawwakil HF staff committed on
Commit
5468ec9
β€’
1 Parent(s): 0f9db6d
Files changed (2) hide show
  1. app.py +49 -36
  2. packages.txt +1 -0
app.py CHANGED
@@ -107,41 +107,53 @@ def run_benchmark(kwargs, oauth_token: gr.OAuthToken):
107
  **configs["ipex"],
108
  )
109
 
110
- html_output = f"<h3>Running benchmark for model {model} on task {task} with backends {backends}</h3>"
 
 
111
 
112
- yield html_output
113
 
114
  timestamp = time.strftime("%Y-%m-%d-%H-%M-%S")
115
 
116
  for backend in backends:
117
- benchmark_name = f"{timestamp}/{backend}"
118
- benchmark_config = BenchmarkConfig(
119
- name=benchmark_name,
120
- backend=configs[backend],
121
- launcher=configs[LAUNCHER],
122
- scenario=configs[SCENARIO],
123
- )
124
- benchmark_config.push_to_hub(
125
- repo_id=f"{username}/benchmarks",
126
- subfolder=benchmark_name,
127
- token=oauth_token.token,
128
- )
129
- benchmark_report = Benchmark.launch(benchmark_config)
130
- benchmark_report.push_to_hub(
131
- repo_id=f"{username}/benchmarks",
132
- subfolder=benchmark_name,
133
- token=oauth_token.token,
134
- )
135
- benchmark = Benchmark(config=benchmark_config, report=benchmark_report)
136
- benchmark.push_to_hub(
137
- repo_id=f"{username}/benchmarks",
138
- subfolder=benchmark_name,
139
- token=oauth_token.token,
140
- )
141
-
142
- html_output += f"<br>πŸ“Š Benchmark report for {backend} backend in the folder {benchmark_name} of your benchmarks dataset"
143
-
144
- yield html_output
 
 
 
 
 
 
 
 
 
 
145
 
146
 
147
  def build_demo():
@@ -194,6 +206,10 @@ def build_demo():
194
  with gr.Accordion(label="Process Config", open=False, visible=True):
195
  process_config = get_process_config()
196
 
 
 
 
 
197
  with gr.Row() as backend_configs:
198
  with gr.Accordion(label="OnnxRuntime Config", open=False, visible=True):
199
  onnxruntime_config = get_onnxruntime_config()
@@ -213,13 +229,10 @@ def build_demo():
213
  )
214
 
215
  with gr.Row():
216
- with gr.Accordion(label="Scenario Config", open=False, visible=True):
217
- inference_config = get_inference_config()
218
-
219
- button = gr.Button(value="Run Benchmark", variant="primary")
220
 
221
  with gr.Row():
222
- html_output = gr.HTML(label="Output", value="")
223
 
224
  button.click(
225
  fn=run_benchmark,
@@ -234,7 +247,7 @@ def build_demo():
234
  *pytorch_config.values(),
235
  *ipex_config.values(),
236
  },
237
- outputs=[html_output],
238
  concurrency_limit=1,
239
  )
240
 
 
107
  **configs["ipex"],
108
  )
109
 
110
+ md_output = (
111
+ f"<h3>Running benchmark for model {model} on task {task} with {backends}</h3>"
112
+ )
113
 
114
+ yield md_output
115
 
116
  timestamp = time.strftime("%Y-%m-%d-%H-%M-%S")
117
 
118
  for backend in backends:
119
+ md_output += f"<br>πŸš€ Launching benchmark for {backend}"
120
+ yield md_output
121
+
122
+ try:
123
+ benchmark_name = f"{timestamp}/{backend}"
124
+ benchmark_config = BenchmarkConfig(
125
+ name=benchmark_name,
126
+ backend=configs[backend],
127
+ launcher=configs[LAUNCHER],
128
+ scenario=configs[SCENARIO],
129
+ )
130
+ benchmark_config.push_to_hub(
131
+ repo_id=f"{username}/benchmarks",
132
+ subfolder=benchmark_name,
133
+ token=oauth_token.token,
134
+ )
135
+ benchmark_report = Benchmark.launch(benchmark_config)
136
+ benchmark_report.push_to_hub(
137
+ repo_id=f"{username}/benchmarks",
138
+ subfolder=benchmark_name,
139
+ token=oauth_token.token,
140
+ )
141
+ benchmark = Benchmark(config=benchmark_config, report=benchmark_report)
142
+ benchmark.push_to_hub(
143
+ repo_id=f"{username}/benchmarks",
144
+ subfolder=benchmark_name,
145
+ token=oauth_token.token,
146
+ )
147
+
148
+ md_output += (
149
+ f"<br>βœ… Benchmark for {backend} backend completed successfully"
150
+ )
151
+ yield md_output
152
+ except Exception as e:
153
+ md_output += (
154
+ f"<br>❌ Error while running benchmark for {backend} backend: {e}"
155
+ )
156
+ yield md_output
157
 
158
 
159
  def build_demo():
 
206
  with gr.Accordion(label="Process Config", open=False, visible=True):
207
  process_config = get_process_config()
208
 
209
+ with gr.Row():
210
+ with gr.Accordion(label="Scenario Config", open=False, visible=True):
211
+ inference_config = get_inference_config()
212
+
213
  with gr.Row() as backend_configs:
214
  with gr.Accordion(label="OnnxRuntime Config", open=False, visible=True):
215
  onnxruntime_config = get_onnxruntime_config()
 
229
  )
230
 
231
  with gr.Row():
232
+ button = gr.Button(value="Run Benchmark", variant="primary")
 
 
 
233
 
234
  with gr.Row():
235
+ md_output = gr.Markdown(label="Output", value="")
236
 
237
  button.click(
238
  fn=run_benchmark,
 
247
  *pytorch_config.values(),
248
  *ipex_config.values(),
249
  },
250
+ outputs=[md_output],
251
  concurrency_limit=1,
252
  )
253
 
packages.txt CHANGED
@@ -0,0 +1 @@
 
 
1
+ numactl