# NOTE: Hugging Face Hub page residue (Space status, file size, commit
# hashes, line-number gutter) removed so this file is valid Python.
import gradio as gr
import allin1
from pathlib import Path
# Markdown rendered at the top of the Space UI (passed to gr.Markdown below).
DESCRIPTION = """
# All-In-One Music Structure Analyzer
This Space demonstrates [All-In-One Music Structure Analyzer](https://github.com/mir-aidj/all-in-one),
a tool that predicts the BPM, beats, downbeats, segment boundaries, and functional segment labels from audio files.
For more information, please visit:
- Python package: [`allin1`](https://github.com/mir-aidj/all-in-one)
- Structure visualizer: [Music Dissector](https://taejun.kim/music-dissector/)
- Paper: [All-In-One Metrical And Functional Structure Analysis With Neighborhood Attentions on Demixed Audio
](https://arxiv.org/abs/2307.16425)
"""
def analyze(path):
    """Analyze one audio file with `allin1` and prepare UI outputs.

    Runs the full All-In-One analysis on the file at *path*, renders the
    structure visualization, and writes a sonified version of the audio
    under ./sonif.

    Returns a 3-tuple of (estimated BPM, matplotlib figure, POSIX path to
    the sonified audio file) — matching the three Gradio output widgets.
    """
    audio_file = Path(path)
    analysis = allin1.analyze(
        audio_file,
        multiprocess=False,
        keep_byproducts=True,  # TODO: remove this
    )
    figure = allin1.visualize(analysis)
    # Sonification is written to disk; the widget is fed the file path.
    allin1.sonify(analysis, out_dir='./sonif')
    sonified = (
        Path(f'./sonif/{audio_file.stem}.sonif{audio_file.suffix}')
        .resolve()
        .as_posix()
    )
    return analysis.bpm, figure, sonified
# Gradio UI: one input audio widget, an Analyze button, and three outputs
# (BPM text, structure plot, sonified audio) wired to `analyze`.
with gr.Blocks() as demo:
    gr.Markdown(DESCRIPTION)
    input_audio_path = gr.Audio(
        label='Input',
        source='upload',
        type='filepath',  # analyze() expects a filesystem path, not raw samples
        format='mp3',
        show_download_button=False,
    )
    button = gr.Button('Analyze', variant='primary')
    output_bpm = gr.Textbox(label='BPM')
    output_viz = gr.Plot(label='Visualization')
    output_sonif = gr.Audio(
        label='Sonification',
        type='filepath',
        format='mp3',
        show_download_button=False,
    )
    # Pre-analyzed example so the Space shows results without an upload;
    # cache_examples=True runs `analyze` once at build time.
    gr.Examples(
        examples=[
            './assets/NewJeans - Super Shy.mp3',
        ],
        inputs=input_audio_path,
        outputs=[output_bpm, output_viz, output_sonif],
        fn=analyze,
        cache_examples=True,
    )
    button.click(
        fn=analyze,
        inputs=input_audio_path,
        outputs=[output_bpm, output_viz, output_sonif],
        api_name='analyze',  # exposed as a named endpoint for API clients
    )

# Guard the launch so importing this module (e.g. for tests or tooling)
# does not start a server; Spaces execute the file as a script, so the
# runtime behavior there is unchanged.
if __name__ == '__main__':
    demo.queue().launch()