import warnings
warnings.filterwarnings('ignore')
import librosa
import numpy as np
from PIL import Image

class Mel:
    """Convert fixed-length slices of audio into mel spectrogram images and back."""

    def __init__(
        self,
        x_res=256,
        y_res=256,
        sample_rate=22050,
        n_fft=2048,
        hop_length=512,
        top_db=80,
    ):
        self.x_res = x_res
        self.y_res = y_res
        self.sr = sample_rate
        self.n_fft = n_fft
        self.hop_length = hop_length
        # one mel bin per pixel of image height
        self.n_mels = self.y_res
        # number of audio samples that map to one x_res-frame spectrogram image
        self.slice_size = self.x_res * self.hop_length - 1
        self.fmax = self.sr / 2
        self.top_db = top_db
        self.y = None

    def load_audio(self, audio_file):
        # load as mono, resampled to the configured sample rate
        self.y, _ = librosa.load(audio_file, mono=True, sr=self.sr)

    def get_number_of_slices(self):
        return len(self.y) // self.slice_size

    def get_sample_rate(self):
        return self.sr

    def audio_slice_to_image(self, slice):
        # mel power spectrogram of the requested slice of the loaded audio
        S = librosa.feature.melspectrogram(
            y=self.y[self.slice_size * slice : self.slice_size * (slice + 1)],
            sr=self.sr,
            n_fft=self.n_fft,
            hop_length=self.hop_length,
            n_mels=self.n_mels,
            fmax=self.fmax,
        )
        # convert to decibels, then rescale [-top_db, 0] dB to [0, 255]
        log_S = librosa.power_to_db(S, ref=np.max, top_db=self.top_db)
        bytedata = (
            ((log_S + self.top_db) * 255 / self.top_db).clip(0, 255) + 0.5
        ).astype(np.uint8)
        # pack the scaled spectrogram into an 8-bit grayscale image
        image = Image.frombytes("L", log_S.shape, bytedata.tobytes())
        return image

    def image_to_audio(self, image):
        # recover the byte matrix from the grayscale image
        bytedata = np.frombuffer(image.tobytes(), dtype="uint8").reshape(
            (image.width, image.height)
        )
        # undo the [0, 255] scaling back to decibels, then to power
        log_S = bytedata.astype("float") * self.top_db / 255 - self.top_db
        S = librosa.db_to_power(log_S)
        # invert the mel spectrogram to a waveform (Griffin-Lim phase estimate)
        audio = librosa.feature.inverse.mel_to_audio(
            S, sr=self.sr, n_fft=self.n_fft, hop_length=self.hop_length
        )
        return audio
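

# Minimal usage sketch: round-trip one slice of audio through the image
# representation. "example.wav" is a hypothetical input path, not a file
# shipped with this Space; substitute any audio file readable by librosa.
if __name__ == "__main__":
    mel = Mel()
    mel.load_audio("example.wav")  # hypothetical path
    # audio slice -> 256x256 grayscale PIL image -> approximate waveform
    image = mel.audio_slice_to_image(0)
    reconstructed = mel.image_to_audio(image)
    print(image.size, len(reconstructed), mel.get_sample_rate())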