import numpy as np
import librosa
import os
import sys
import glob
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, activations, losses, metrics, optimizers, callbacks
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from python_ai.common.xcommon import *
import signal

# Fix both RNGs so the shuffle permutation and weight init are reproducible.
np.random.seed(1)
tf.random.set_seed(1)

VER = 'v8.0'
IS_GO_ON = False   # resume training after loading saved weights?
BASE_EPOCH = None  # None -> load latest checkpoint; int -> load that epoch's checkpoint
BATCH_SIZE = 64
N_EPOCHS = 8
SAVE_FREQ = 4      # checkpoint every SAVE_FREQ epochs
ALPHA = 1e-5       # learning rate
BASE_DIR, FILE_NAME = os.path.split(__file__)
LOG_DIR = os.path.join(BASE_DIR, '_log', FILE_NAME, VER)
SAVE_DIR = os.path.join(BASE_DIR, '_save', FILE_NAME, VER)
# Build the data paths directly rather than routing them through a throwaway
# variable named `dir`, which shadowed the `dir` builtin.
_DATA_ROOT = '../../../../large_data/audio/_many_files/direction_data_tidied'
DATA_DIR_TRAIN = os.path.join(BASE_DIR, _DATA_ROOT, 'train')
DATA_DIR_TEST = os.path.join(BASE_DIR, _DATA_ROOT, 'test')
VEC_DIR_TRAIN = os.path.join(SAVE_DIR, 'vectors', 'train')
VEC_DIR_TEST = os.path.join(SAVE_DIR, 'vectors', 'test')
MODEL_SAVE_DIR = os.path.join(SAVE_DIR, 'weights')
MODEL_SAVE_PATH = os.path.join(MODEL_SAVE_DIR, 'weights')
EPOCH_SAVE_PATH = os.path.join(SAVE_DIR, 'epochs.txt')

def extract_vectors(data_dir, vector_dir):
    """Convert every .wav under data_dir/<label>/ into a mean-MFCC vector file.

    For each class sub-directory of data_dir, each wav file is loaded at its
    native sampling rate, a 100-coefficient MFCC is computed and averaged
    over time, and the resulting 1-D vector is written as text to
    vector_dir/<label>/<file>.wav.txt. Existing vector files are skipped,
    so the function is resumable and idempotent.

    Args:
        data_dir: directory containing one sub-directory per class of wavs.
        vector_dir: mirror directory that receives the .txt vector files.
    """
    os.makedirs(vector_dir, exist_ok=True)
    cnt = 0
    for sub_dir_name in os.listdir(data_dir):
        sub_dir_path = os.path.join(data_dir, sub_dir_name)
        if not os.path.isdir(sub_dir_path):
            continue  # ignore stray files at the top level
        vec_sub_dir_path = os.path.join(vector_dir, sub_dir_name)
        os.makedirs(vec_sub_dir_path, exist_ok=True)
        for file_name in os.listdir(sub_dir_path):
            _, ext = os.path.splitext(file_name)
            if ext.lower() != '.wav':
                continue
            vector_path = os.path.join(vec_sub_dir_path, file_name + '.txt')
            if os.path.exists(vector_path):
                continue  # already vectorized on a previous run
            file_path = os.path.join(sub_dir_path, file_name)
            # sr=None keeps the file's native sampling rate.
            x, sr = librosa.load(file_path, sr=None, res_type='kaiser_fast')
            # Pass the signal via keyword `y`: librosa >= 0.10 makes the audio
            # argument keyword-only, so the old positional call fails there.
            mfcc = librosa.feature.mfcc(y=x, sr=sr, n_mfcc=100)
            mfcc = np.mean(mfcc, axis=1)  # average over time frames -> (100,)
            np.savetxt(vector_path, mfcc)
            cnt += 1
            if cnt % 25 == 0:
                print(f'Processed {cnt} wav files.')


# Module-level side effect: (re)build the MFCC vector caches before loading.
extract_vectors(DATA_DIR_TRAIN, VEC_DIR_TRAIN)
extract_vectors(DATA_DIR_TEST, VEC_DIR_TEST)


def load_vectors(vec_dir, label2idx=None):
    """Load saved MFCC vectors from vec_dir/<label>/*.txt.

    Sub-directory names are the class labels. Labels are enumerated in
    sorted order so the label -> index mapping is stable across runs and
    filesystems: os.listdir order is arbitrary, and a mapping that drifts
    between the training run and a later evaluation run would silently
    mislabel every prediction of a reloaded model. Non-directory entries
    are skipped, mirroring extract_vectors.

    Args:
        vec_dir: directory containing one sub-directory per class.
        label2idx: optional existing label -> index mapping; when given,
            labels are encoded with it instead of the fresh enumeration.

    Returns:
        (x, y, label2idx_new): float32 feature matrix, int64 label vector,
        and the mapping built from this directory.
    """
    yi = 0
    x, y = [], []
    label2idx_new = {}
    for sub_dir_name in sorted(os.listdir(vec_dir)):
        sub_dir_path = os.path.join(vec_dir, sub_dir_name)
        if not os.path.isdir(sub_dir_path):
            continue
        label2idx_new[sub_dir_name] = yi
        for file_name in os.listdir(sub_dir_path):
            file_path = os.path.join(sub_dir_path, file_name)
            x.append(np.loadtxt(file_path))
            y.append(yi if label2idx is None else label2idx[sub_dir_name])
        yi += 1
    x = np.asarray(x, dtype=np.float32)
    y = np.asarray(y, dtype=np.int64)
    return x, y, label2idx_new


# --- Assemble train / validation / test splits ---------------------------
x_train, y_train, label2idx = load_vectors(VEC_DIR_TRAIN)
idx2label = {i: lbl for lbl, i in label2idx.items()}
n_train, n_vec = x_train.shape[:2]  # sample count, MFCC vector length
n_cls = len(np.unique(y_train))     # number of direction classes
# One reproducible shuffle of the training set (np.random.seed was set above).
rand_idx = np.random.permutation(n_train)
x_train = x_train[rand_idx]
y_train = y_train[rand_idx]
# Encode test labels with the TRAIN mapping so indices agree across splits,
# then split the held-out data 50/50 into test and validation sets.
x_test_val, y_test_val, _ = load_vectors(VEC_DIR_TEST, label2idx)
x_test, x_val, y_test, y_val = train_test_split(x_test_val, y_test_val, train_size=0.5, random_state=1, shuffle=True)
print('x_train', x_train.shape)
print('y_train', y_train.shape)
print('x_test', x_test.shape)
print('y_test', y_test.shape)
print('x_val', x_val.shape)
print('y_val', y_val.shape)

# tf.data pipelines. Only the training set is shuffled: shuffling evaluation
# data serves no purpose, and a shuffled test pipeline (which reshuffles on
# every iteration) would reorder model.predict(dl_test) output relative to
# y_test, corrupting the classification report below.
dl_train = tf.data.Dataset.from_tensor_slices((x_train, y_train))\
    .shuffle(1000).batch(BATCH_SIZE).prefetch(tf.data.experimental.AUTOTUNE)
dl_test = tf.data.Dataset.from_tensor_slices((x_test, y_test))\
    .batch(BATCH_SIZE).prefetch(tf.data.experimental.AUTOTUNE)
dl_val = tf.data.Dataset.from_tensor_slices((x_val, y_val))\
    .batch(BATCH_SIZE).prefetch(tf.data.experimental.AUTOTUNE)

# Simple MLP classifier: two 200-unit ReLU hidden layers over the MFCC
# vector, softmax over the direction classes.
inputs = keras.Input((n_vec,))
hidden = inputs
for _ in range(2):
    hidden = layers.Dense(200, activation=activations.relu)(hidden)
outputs = layers.Dense(n_cls, activation=activations.softmax)(hidden)
model = keras.Model(inputs, outputs)
model.summary()

model.compile(
    optimizer=optimizers.Adam(learning_rate=ALPHA),
    loss=losses.sparse_categorical_crossentropy,
    metrics=[metrics.sparse_categorical_accuracy],
)

# --- Resume-or-train logic ------------------------------------------------
# If any saved weights exist, load them (the latest, or the epoch pinned by
# BASE_EPOCH) and only continue training when IS_GO_ON is set.
need_training = False
base_epoch = 0
if len(glob.glob(MODEL_SAVE_PATH + '*')) > 0:
    print('Loading ...')
    if BASE_EPOCH is None:
        # Latest weights plus the epoch counter persisted by save_model().
        model.load_weights(MODEL_SAVE_PATH)
        with open(EPOCH_SAVE_PATH, 'r') as f:
            base_epoch = int(f.read())
    else:
        # A specific per-epoch checkpoint written by ModelCheckpoint below.
        model.load_weights(MODEL_SAVE_PATH + f'.{BASE_EPOCH}')
        base_epoch = BASE_EPOCH
    print(f'Base epoch = {base_epoch}')
    print('Loaded.')
    if IS_GO_ON:
        need_training = True
else:
    need_training = True
if need_training:
    # current_epoch is updated by MyCallback and read by save_model(), so an
    # interrupt (Ctrl-C) persists exactly how far training got.
    current_epoch = base_epoch


    def save_model():
        """Persist the epoch counter and latest weights so a later run can resume."""
        print('Saving ...')
        os.makedirs(MODEL_SAVE_DIR, exist_ok=True)
        with open(EPOCH_SAVE_PATH, 'w') as f:
            f.write(str(current_epoch))
        model.save_weights(MODEL_SAVE_PATH)
        print('Saved.')


    def signal_handler(sig, frame):
        """On SIGINT (Ctrl-C): checkpoint first, then exit cleanly."""
        save_model()
        sys.exit(0)


    signal.signal(signal.SIGINT, signal_handler)


    class MyCallback(callbacks.Callback):
        """Tracks the 1-based index of the epoch currently in progress."""

        def on_epoch_begin(self, epoch, logs=None):
            global current_epoch
            current_epoch = epoch + 1
            print('current epoch', current_epoch)


    # Keras counts epochs absolutely, so resuming means shifting both
    # initial_epoch and the end epoch by base_epoch.
    model.fit(
        dl_train,
        epochs=N_EPOCHS + base_epoch,
        validation_data=dl_val,
        initial_epoch=base_epoch,
        callbacks=[
            callbacks.TensorBoard(LOG_DIR, update_freq='epoch', profile_batch=0),
            MyCallback(),
            callbacks.ModelCheckpoint(
                MODEL_SAVE_PATH + '.{epoch:d}',
                verbose=1,
                save_weights_only=True,
                # save_freq is measured in batches; len(dl_train) batches per
                # epoch -> one checkpoint every SAVE_FREQ epochs.
                save_freq=len(dl_train) * SAVE_FREQ,
            )
        ]
    )
    save_model()

print('Testing ...')
model.evaluate(dl_test)

sep('Clf rpt')
# Predict on the raw array rather than the tf.data pipeline: a shuffled
# pipeline reshuffles on each iteration, so predictions drawn from it would
# not line up row-for-row with y_test. Predicting on x_test preserves order.
y_pred = model.predict(x_test, batch_size=BATCH_SIZE).argmax(axis=1)

rpt = classification_report(y_test, y_pred)
print(rpt)
