asigalov61 committed
Commit 52381e7
1 Parent(s): 3f70f5c

Update app.py

app.py CHANGED
@@ -233,17 +233,61 @@ def ClassifyMIDI(input_midi):
 
         results.append(result)
 
-    all_results_labels = [classifier_labels[0][r-384] for r in results]
     final_result = mode(results)
 
+    print('=' * 70)
     print('Done!')
     print('=' * 70)
 
-
-
-
+    result_toks = [final_result[0]-512, final_result[1]-512]
+    mc_song_artist = song_artist_tokens_to_song_artist(result_toks)
+    gidx = genre_labels_fnames.index(mc_song_artist)
+    mc_genre = genre_labels[gidx][1]
+
+    print('Most common classification genre label:', mc_genre)
+    print('Most common classification song-artist label:', mc_song_artist)
+    print('Most common song-artist classification label ratio:' , results.count(final_result) / len(results))
+    print('=' * 70)
+
+    print('All classification labels summary:')
+    print('=' * 70)
+
+    all_artists_labels = []
+
+    for i, res in enumerate(results):
+        result_toks = [res[0]-512, res[1]-512]
+        song_artist = song_artist_tokens_to_song_artist(result_toks)
+        gidx = genre_labels_fnames.index(song_artist)
+        genre = genre_labels[gidx][1]
+        print('Notes', i*170, '-', (i*170)+340, '===', genre, '---', song_artist)
+
+        artist_label = str_strip_artist(song_artist.split(' --- ')[1])
+
+        all_artists_labels.append(artist_label)
+
+    print('=' * 70)
+
+    mode_artist_label = mode(all_artists_labels)
+    mode_artist_label_count = all_artists_labels.count(mode_artist_label)
+
+    print('Aggregated artist classification label:', mode_artist_label)
+    print('Aggregated artist classification label ratio:', mode_artist_label_count / len(all_artists_labels))
+
+    print('=' * 70)
+    print('Done!')
     print('=' * 70)
 
+
+
+
+
+
+
+
+
+
+
+
     classification_summary_string += 'Most common classification label: ' + str(classifier_labels[0][final_result-384]) + '\n'
     classification_summary_string += 'Most common classification label ratio: ' + str(results.count(final_result) / len(results)) + '\n'
     classification_summary_string += 'Most common classification label index '+ str(final_result) + '\n'
@@ -265,53 +309,13 @@ def ClassifyMIDI(input_midi):
     print('=' * 70)
 
     #===============================================================================
-    print('Rendering results...')
-
-    score_idx = processed_scores_labels.index(classifier_labels[0][final_result-384])
-
-    output_score = processed_scores[score_idx][1][:6000]
-
-    print('=' * 70)
-    print('Sample INTs', results[:15])
-    print('=' * 70)
-
-    fn1 = processed_scores[score_idx][0]
-
-    output_score = TMIDIX.recalculate_score_timings(output_score)
-
-    output_score, patches, overflow_patches = TMIDIX.patch_enhanced_score_notes(output_score)
-
-    detailed_stats = TMIDIX.Tegridy_ms_SONG_to_MIDI_Converter(output_score,
-                                                              output_signature = 'Advanced MIDI Classifier',
-                                                              output_file_name = fn1,
-                                                              track_name='Project Los Angeles',
-                                                              list_of_MIDI_patches=patches,
-                                                              timings_multiplier=16
-                                                              )
-
-    new_fn = fn1+'.mid'
-
-
-    audio = midi_to_colab_audio(new_fn,
-                                soundfont_path=soundfont,
-                                sample_rate=16000,
-                                volume_scale=10,
-                                output_for_gradio=True
-                                )
-
-    print('Done!')
-    print('=' * 70)
 
     #========================================================
 
     output_midi_title = str(fn1)
     output_midi_summary = classification_summary_string
-    output_midi = str(new_fn)
-    output_audio = (16000, audio)
 
-
-
-    print('Output MIDI file name:', output_midi)
+    print('Output MIDI file name:', mc_song_artist)
     print('Output MIDI title:', output_midi_title)
     print('=' * 70)
 
@@ -323,7 +327,7 @@ def ClassifyMIDI(input_midi):
     print('-' * 70)
     print('Req execution time:', (reqtime.time() - start_time), 'sec')
 
-    return
+    return classification_summary_string
 
 # =================================================================================================
 
@@ -403,13 +407,9 @@ if __name__ == "__main__":
 
         gr.Markdown("## Classification results")
 
-
-        output_midi_summary = gr.Textbox(label="MIDI classification summary")
-        output_audio = gr.Audio(label="Best classification match MIDI audio", format="wav", elem_id="midi_audio")
-        output_plot = gr.Plot(label="Best classification match MIDI score plot")
-        output_midi = gr.File(label="Best classification match MIDI file", file_types=[".mid"])
+        output_midi_cls_summary = gr.Textbox(label="MIDI classification summary")
 
        run_event = run_btn.click(ClassifyMIDI, [input_midi],
-                                 [
+                                 [output_midi_cls_summary])
 
    app.queue().launch()
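The new classification path decodes each classifier result into a 'Song --- Artist' label, prints a per-chunk breakdown, and then reports the most frequent artist across all chunks. Below is a minimal, self-contained sketch of that aggregation pattern, not the app's actual code: decode_song_artist() is a hypothetical stand-in for the app's song_artist_tokens_to_song_artist() helper, and the 512 token offset mirrors the diff.

from statistics import mode

def decode_song_artist(toks):
    # Hypothetical decoder: the real helper maps de-offset tokens to a
    # 'Song --- Artist' string via the model's label tables.
    return 'Song ' + str(toks[0]) + ' --- Artist ' + str(toks[1])

def aggregate_artist_labels(results):
    all_artists_labels = []
    for res in results:
        # Undo the +512 token offset before decoding, as in the diff.
        song_artist = decode_song_artist([res[0] - 512, res[1] - 512])
        # Keep only the artist half of the 'Song --- Artist' label.
        all_artists_labels.append(song_artist.split(' --- ')[1])
    mode_artist_label = mode(all_artists_labels)
    ratio = all_artists_labels.count(mode_artist_label) / len(all_artists_labels)
    return mode_artist_label, ratio

# Two of three chunks agree on one token pair, so that artist wins with ratio 2/3.
print(aggregate_artist_labels([(900, 1200), (900, 1200), (901, 1300)]))

Note that the genre lookup in the diff (genre_labels_fnames.index(mc_song_artist)) assumes every decoded song-artist string is present in that list; a missing entry would raise ValueError.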
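On the Gradio side, the audio, plot, and file outputs are removed and ClassifyMIDI's returned summary string is routed into a single Textbox. Below is a minimal sketch of that wiring, with a hypothetical classify() placeholder standing in for ClassifyMIDI and for the app's actual input components.

import gradio as gr

def classify(input_midi):
    # The real ClassifyMIDI returns classification_summary_string; this
    # placeholder just echoes the uploaded file path.
    return 'Classification summary for: ' + str(input_midi)

if __name__ == "__main__":
    with gr.Blocks() as app:
        gr.Markdown("## Classification results")
        input_midi = gr.File(label="Input MIDI", file_types=[".mid", ".midi"])
        run_btn = gr.Button("Classify")
        output_midi_cls_summary = gr.Textbox(label="MIDI classification summary")
        # The outputs list must match the handler's return value: one string
        # for one Textbox component.
        run_btn.click(classify, [input_midi], [output_midi_cls_summary])

    app.queue().launch()

Because the click handler now has a single output component, returning just the summary string is enough; the earlier multi-output wiring would have required a tuple of values in the same order as the outputs list.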