TomRB22 committed on
Commit
9ec6403
1 Parent(s): 5b818f3

Started documenting code

Browse files
Files changed (1) hide show
  1. audio_methods.py +43 -5
audio_methods.py CHANGED
@@ -21,8 +21,17 @@ _SCALING_FACTORS = pd.Series(
21
  ) # Factors used to normalize song maps
22
 
23
  def midi_to_notes(midi_file: str) -> pd.DataFrame:
24
- # Convert midi file to "song map" (dataframe where each note is broken
25
- # into its components)
 
 
 
 
 
 
 
 
 
26
 
27
  pm = pretty_midi.PrettyMIDI(midi_file)
28
  instrument = pm.instruments[0]
@@ -48,17 +57,46 @@ def midi_to_notes(midi_file: str) -> pd.DataFrame:
48
  return notes_df / _SCALING_FACTORS # Scale
49
 
50
 
51
- def display_audio(pm: pretty_midi.PrettyMIDI, seconds=120):
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  waveform = pm.fluidsynth(fs=_SAMPLING_RATE)
53
  # Take a sample of the generated waveform to mitigate kernel resets
54
- waveform_short = waveform[:seconds*_SAMPLING_RATE]
 
 
 
 
55
  return display.Audio(waveform_short, rate=_SAMPLING_RATE)
56
 
57
 
58
  # Define function to convert song map to wav
59
 
60
  def map_to_wav(song_map: pd.DataFrame, out_file: str, velocity: int=100):
61
- # Convert "song map" to midi file (reverse process with respect to midi_to_notes)
 
 
 
 
 
 
 
 
 
 
 
 
62
 
63
  contracted_map = tf.squeeze(song_map)
64
  song_map_T = contracted_map.numpy().T
 
21
  ) # Factors used to normalize song maps
22
 
23
  def midi_to_notes(midi_file: str) -> pd.DataFrame:
24
+ """
25
+ Convert midi file to "song map" (dataframe where each note is broken
26
+ into its components)
27
+
28
+ Parameters:
29
+ midi_file (str): Path to the midi file.
30
+
31
+ Returns:
32
+ pd.DataFrame: 3xN matrix where each column is a note, composed of
33
+ pitch, duration and step.
34
+ """
35
 
36
  pm = pretty_midi.PrettyMIDI(midi_file)
37
  instrument = pm.instruments[0]
 
57
  return notes_df / _SCALING_FACTORS # Scale
58
 
59
 
60
def display_audio(pm: pretty_midi.PrettyMIDI, seconds: int = -1):
    """
    Display a song in PrettyMIDI format as a display.Audio object.
    This method comes in especially useful in a jupyter notebook.

    Parameters:
        pm (pretty_midi.PrettyMIDI): PrettyMIDI object containing a song.
        seconds (int): Time fraction of the song to be displayed. When
            set to -1, the full length is taken.

    Returns:
        display.Audio: Song as an object allowing for display.
    """

    waveform = pm.fluidsynth(fs=_SAMPLING_RATE)
    # Take a sample of the generated waveform to mitigate kernel resets.
    if seconds == -1:
        # Full song requested: no need to copy the waveform, use it as-is.
        waveform_short = waveform
    else:
        # int() guards against a float `seconds` producing a non-integer
        # slice index, which would raise a TypeError.
        waveform_short = waveform[:int(seconds * _SAMPLING_RATE)]
    return display.Audio(waveform_short, rate=_SAMPLING_RATE)
82
 
83
 
84
  # Define function to convert song map to wav
85
 
86
  def map_to_wav(song_map: pd.DataFrame, out_file: str, velocity: int=100):
87
+ """
88
+ Convert "song map" to midi file (reverse process with respect to midi_to_notes)
89
+
90
+ Parameters:
91
+ song_map (pd.DataFrame): Song map (dataframe of notes) to be converted.
92
+ out_file (str): Name of the output midi file.
93
+
94
+ Returns:
95
+ pd.DataFrame: 3xN matrix as a dataframe where each column is a note,
96
+ composed of pitch, duration and step.
97
+ """
98
+
99
+ #
100
 
101
  contracted_map = tf.squeeze(song_map)
102
  song_map_T = contracted_map.numpy().T