josuelmet committed
Commit 3ea2913
1 parent: e11cb6f

Upload _Compressor.py

Files changed (1)
_Compressor.py +208 -0
_Compressor.py ADDED
@@ -0,0 +1,208 @@
'''
Imports
'''
import guitarpro
from guitarpro import GuitarString, NoteType
import numpy as np

from _NoteData import NoteData


'''
Constants
'''
# PITCH[i] = the pitch associated with MIDI note number i.
# For example, PITCH[69] = 'A4'.
PITCH = {val: str(GuitarString(number=0, value=val)) for val in range(128)}
# MIDI[name] = the MIDI note number associated with the pitch named by name.
# For example, MIDI['A4'] = 69.
MIDI = {str(GuitarString(number=0, value=val)): val for val in range(128)}

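# Added note: PITCH and MIDI are inverse tables built over the same range,
# so MIDI[PITCH[n]] == n for every MIDI note number n in 0-127.
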
'''
process_notes function
'''
def process_notes(beat, tuning, as_fingerings=True):

    noteData = NoteData()

    duration = (beat.duration.value, beat.duration.isDotted)

    # Tuplets are cool but rare.
    # If a tuplet is found, simply halve its play time (by doubling its duration value) to simplify things.
    if beat.duration.tuplet.enters != 1 or beat.duration.tuplet.times != 1:
        duration = (duration[0] * 2, duration[1])  # Tuples aren't mutable, so just re-assign the tuple.
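        # For example, an eighth-note triplet (duration value 8) is stored
        # here as a plain sixteenth (value 16) under this simplification.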

    noteData.duration = duration[0]
    noteData.isDotted = duration[1]

    if len(beat.notes) == 0:
        # return 'rest', duration[0], duration[1], False
        noteData.value = 'rest'
        return noteData

    noteData.palmMute = beat.notes[0].effect.palmMute

    note_types = [note.type for note in beat.notes]

    if all(note_type == NoteType.rest for note_type in note_types):
        # return 'rest', duration[0], duration[1], False
        noteData.value = 'rest'
        return noteData

    if all(note_type == NoteType.tie for note_type in note_types):
        # return 'tied', duration[0], duration[1], False
        noteData.value = 'tied'
        return noteData

    if all(note_type == NoteType.dead for note_type in note_types):
        # return 'dead', duration[0], duration[1], False
        noteData.value = 'dead'
        return noteData

    lowest_string = len(tuning)

    if as_fingerings:
        # NEW CODE: represent each pitch as its distance (in semitones) from the tuning of the lowest string.
        pitches = np.array([note.value + tuning[note.string] - tuning[lowest_string] for note in beat.notes if note.type == NoteType.normal])
    else:
        # OLD CODE: note_number = MIDI note number, where A4 = 440 Hz = note 69.
        pitches = np.array([note.value + tuning[note.string] for note in beat.notes if note.type == NoteType.normal])

    # Remove any possible NaN values.
    pitches = pitches[~np.isnan(pitches)]

    # Pitches are often stored in descending order, but we want them in ascending order.
    # Flip them before sorting, so the sort starts from (nearly) ascending input.
    pitches = np.sort(pitches[::-1])

    if len(pitches) == 0:
        # return 'rest', duration[0], duration[1]
        noteData.value = 'rest'
        return noteData

    if len(pitches) == 1:
        if as_fingerings:
            # NEW CODE:
            # return str(pitches[0]), duration[0], duration[1]
            noteData.value = str(pitches[0])
            return noteData
        else:
            # OLD CODE:
            # return PITCH[pitches[0]], duration[0], duration[1]
            noteData.value = PITCH[pitches[0]]
            return noteData

    # Look at the pitch intervals among the lowest few notes that are being played.
    # Usually, chords will start at the lowest 2 notes.
    # However, sometimes players will strum the open lowest string constantly throughout the song
    # (see: 'Be Quiet and Drive', 'Kaiowas').
    # Thus, the next-highest pairs of notes should also be considered when labeling a chord.
    if len(pitches) == 2:
        note_pairs = [(0, 1)]
    elif len(pitches) == 3:
        note_pairs = [(0, 1), (0, 2), (1, 2)]
    elif len(pitches) >= 4:
        note_pairs = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]

    for idx1, idx2 in note_pairs:

        interval = pitches[idx2] - pitches[idx1]

        if interval == 12 or interval == 7:
            # Return a power chord associated with pitches[idx1].
            if as_fingerings:
                # NEW CODE:
                # return str(pitches[idx1]) + '_5', duration[0], duration[1]
                noteData.value = str(pitches[idx1]) + '_5'
                return noteData
            else:
                # OLD CODE:
                # return PITCH[pitches[idx1]] + '_5', duration[0], duration[1]
                noteData.value = PITCH[pitches[idx1]] + '_5'
                return noteData

        if interval == 6:
            # Return a tritone chord associated with pitches[idx1].
            if as_fingerings:
                # NEW CODE:
                # return str(pitches[idx1]) + '_dim5', duration[0], duration[1]
                noteData.value = str(pitches[idx1]) + '_dim5'
                return noteData
            else:
                # OLD CODE:
                # return PITCH[pitches[idx1]] + '_dim5', duration[0], duration[1]
                noteData.value = PITCH[pitches[idx1]] + '_dim5'
                return noteData

        if interval == 5:
            # Return a P4 chord associated with pitches[idx1].
            if as_fingerings:
                # return str(pitches[idx1]) + '_4', duration[0], duration[1]
                noteData.value = str(pitches[idx1]) + '_4'
                return noteData
            else:
                # return PITCH[pitches[idx1]] + '_4', duration[0], duration[1]
                noteData.value = PITCH[pitches[idx1]] + '_4'
                return noteData


    if as_fingerings:
        # NEW CODE:
        # return str(pitches[0]), duration[0], duration[1]
        noteData.value = str(pitches[0])
        return noteData
    else:
        # OLD CODE:
        # return PITCH[pitches[0]], duration[0], duration[1]
        noteData.value = PITCH[pitches[0]]
        return noteData


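# Illustrative sketch of the interval-to-label rule that process_notes applies
# above, restated on plain integers. The pitch values here are hypothetical
# fingerings (semitone offsets from the lowest string), not real tab data.
def _demo_label_lowest_interval(pitches):
    interval = pitches[1] - pitches[0]
    if interval in (12, 7):
        return str(pitches[0]) + '_5'     # power chord (perfect fifth or octave)
    if interval == 6:
        return str(pitches[0]) + '_dim5'  # tritone
    if interval == 5:
        return str(pitches[0]) + '_4'     # perfect fourth
    return str(pitches[0])                # fall back to the lowest pitch

# e.g. _demo_label_lowest_interval([0, 7]) == '0_5'
#      _demo_label_lowest_interval([3, 9]) == '3_dim5'
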
'''
compress_track function
'''
def compress_track(track, as_fingerings=True):
    # 'song' contains the compressed representation of track.
    song = np.empty(len(track.measures), dtype=object)

    # Get the tuning of the instrument in this track.
    # (Basses have 4-6 strings, while metal guitars have 6-8 strings.)
    tuning = {string.number: string.value for string in track.strings}

    # print(f'Tuning = {[PITCH[x] for x in tuning.values()]}')

    for m_i, measure in enumerate(track.measures):
        '''
        Upon inspection of some of the most popular Songsterr .gp5 tabs,
        it turns out that each measure always has two Voices.
        The first Voice (index 0) always contains music, while
        the second Voice (index 1) always just contains an empty Beat with no notes.

        Therefore, only the first Voice (index 0) actually matters.
        '''
        song[m_i] = []

        # print(m_i + 1)
        for b_i, beat in enumerate(measure.voices[0].beats):
            song[m_i].append(process_notes(beat, tuning, as_fingerings).as_tuple())
            # print('\t', song[m_i][b_i], '\t', beat.duration)

    return song
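

# Usage sketch (illustrative; the file name below is hypothetical).
# PyGuitarPro's guitarpro.parse() returns a Song whose tracks can be fed
# to compress_track() one at a time.
if __name__ == '__main__':
    demo_song = guitarpro.parse('example.gp5')  # hypothetical .gp5 input file
    for demo_track in demo_song.tracks:
        compressed = compress_track(demo_track, as_fingerings=True)
        print(demo_track.name, '->', len(compressed), 'measures')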