import numpy as np
import sys

np.random.seed(1)


def sigmoid(Z):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-Z)).

    The argument is clipped to [-88, 88] before exponentiation: float32
    overflows around exp(88), and the unclipped form raised RuntimeWarnings
    for strongly negative Z. At those magnitudes the sigmoid is already
    indistinguishable from 0 or 1, so clipping does not change results.
    """
    A = 1. / (1. + np.exp(-np.clip(Z, -88., 88.)))
    return A


class MLPLayerSigmoid(object):
    """One fully-connected layer followed by a sigmoid activation.

    Weights are drawn from a standard normal distribution and biases start
    at zero.  Each forward pass caches the bias-augmented input (X), the
    stacked weight matrix (W) and the activations (outputs) so that
    backpropagation can reuse them without recomputing the forward pass.
    """

    def __init__(self, n_neurons, n_inputs):
        self.n_neurons = n_neurons
        self.n_inputs = n_inputs
        # (n_inputs, n_neurons) weight matrix and a (1, n_neurons) bias row.
        self.coef = np.random.randn(n_inputs, n_neurons).astype(np.float32)
        self.bias = np.zeros([1, n_neurons], dtype=np.float32)

    def trainable_vars(self):
        """Return the number of trainable scalars (weights + biases)."""
        return self.coef.size + self.bias.size

    def __call__(self, inputs, *args, **kwargs):
        """Forward pass: sigmoid(inputs @ coef + bias), cached for backprop."""
        self.inputs = inputs
        n_samples, _ = inputs.shape
        # Prepend a column of ones so the bias folds into the matrix product.
        ones_col = np.ones([n_samples, 1], dtype=np.float32)
        X = np.hstack([ones_col, inputs])
        self.X = X
        # Stack the bias row on top of the weights to line up with the ones.
        W = np.vstack([self.bias, self.coef])
        self.W = W
        A = sigmoid(X @ W)
        self.outputs = A
        return A


class MLPSigmoid(object):
    """Multi-layer perceptron in which every layer uses a sigmoid activation.

    Trained with plain gradient descent on the binary cross-entropy loss.
    Layers cache their forward tensors (X, W, outputs) so ``backward`` can
    compute gradients without re-running the network.
    """

    def __init__(self, input_size, n_neurons_list, alpha):
        """Build the layer stack.

        input_size     -- number of input features.
        n_neurons_list -- neurons per layer, e.g. [128, 64, 32, 1].
        alpha          -- gradient-descent learning rate.
        """
        self.alpha = alpha
        self.n_layers = len(n_neurons_list)
        n_inputs = input_size
        self.layers = []
        n_all_vars = 0
        for n_neurons in n_neurons_list:
            layer = MLPLayerSigmoid(n_neurons, n_inputs)
            n_vars = layer.trainable_vars()
            n_all_vars += n_vars
            print(f'{n_inputs} X {n_neurons} {n_vars}')
            self.layers.append(layer)
            n_inputs = n_neurons  # this layer's output width feeds the next layer
        print(f'All vars: {n_all_vars}')

    def save_weights(self, path):
        """Save every layer's bias then coef, flattened, as one .npy array."""
        # Collect the pieces and concatenate once; the previous repeated
        # np.append copied the growing buffer on every call (quadratic).
        parts = []
        for layer in self.layers:
            parts.append(layer.bias.ravel())
            parts.append(layer.coef.ravel())
        np.save(path, np.concatenate(parts), allow_pickle=False)

    def load_weights(self, path):
        """Restore weights written by save_weights (same layer layout)."""
        weights = np.load(path)  # renamed from `vars`, which shadows a builtin
        print('vars', weights.shape)
        pos = 0
        for layer in self.layers:
            # Bias comes first: n_neurons values reshaped to a (1, n) row.
            pos_end = pos + layer.n_neurons
            layer.bias = weights[pos:pos_end].reshape(1, layer.n_neurons)
            pos = pos_end  # important
            # Then the full weight matrix for this layer.
            pos_end = pos + layer.n_neurons * layer.n_inputs
            layer.coef = weights[pos:pos_end].reshape(layer.n_inputs, layer.n_neurons)
            pos = pos_end  # important
        print('pos', pos)

    def __call__(self, inputs, *args, **kwargs):
        """Forward pass through all layers; returns the final activations."""
        for layer in self.layers:
            layer(inputs)
            inputs = layer.outputs
        self.outputs = inputs
        return inputs

    def cost(self, A, Y):
        """Mean binary cross-entropy of predictions A against labels Y.

        A and Y are expected to be (m, 1) column vectors.
        """
        m = len(Y)
        # Clip away exact 0/1 so np.log cannot yield -inf/nan when a
        # float32 sigmoid output saturates.
        A = np.clip(A, 1e-7, 1.0 - 1e-7)
        j = np.dot(Y.T, np.log(A)) + np.dot((1.0 - Y.T), np.log(1.0 - A))
        j = j[0][0] / -m
        return j

    def metrics(self, Y, A):
        """Return [accuracy, recall, precision, f1] at threshold 0.5."""
        Y = Y.astype(np.float32)
        m = len(Y)
        # `np.int` was removed in NumPy 1.24; use an explicit fixed-width type.
        ones = np.ones_like(Y, dtype=np.int64)
        TP = ones[(A > 0.5) & (Y > 0.5)].sum()
        TN = ones[(A < 0.5) & (Y < 0.5)].sum()
        FP = ones[(A > 0.5) & (Y < 0.5)].sum()
        FN = ones[(A < 0.5) & (Y > 0.5)].sum()
        acc = (TP + TN) / m
        recall = TP / (TP + FN + 1e-10)
        precision = TP / (TP + FP + 1e-10)
        f1 = (2 * recall * precision) / (recall + precision + 1e-10)
        return np.array([acc, recall, precision, f1], dtype=np.float32)

    def backward(self, Y):
        """Backprop: set delta_bias / delta_coef on every layer.

        Assumes a forward pass was just run so each layer still holds its
        cached X, W and outputs.
        """
        for i, layer in enumerate(self.layers[::-1]):
            A = layer.outputs
            m = len(layer.X)
            if 0 == i:
                # Output layer: dJ/dZ for sigmoid + cross-entropy is A - Y.
                dz = A - Y
            else:
                # Hidden layer: chain rule through the sigmoid derivative.
                dz = delta * (A * (1.0 - A))
            if i < self.n_layers - 1:
                # Propagate to the previous layer; drop the bias column.
                delta = np.dot(dz, layer.W.T)[:, 1:]
            dw = np.dot(layer.X.T, dz) / m
            layer.delta_bias = dw[0:1, :]
            layer.delta_coef = dw[1:, :]

    def step(self):
        """Apply one gradient-descent update to every layer."""
        for layer in self.layers:
            layer.bias -= self.alpha * layer.delta_bias
            layer.coef -= self.alpha * layer.delta_coef


def dl(x_data, y_data, n_batch_size, is_train=False):
    """Yield (x, y) mini-batches of at most n_batch_size samples.

    When is_train is true the samples are shuffled (with one shared
    permutation, keeping x and y aligned) before batching; the final
    batch may be smaller than n_batch_size.
    """
    total = len(x_data)

    if is_train:
        order = np.random.permutation(total)
        x_data = x_data[order]
        y_data = y_data[order]

    for start in range(0, total, n_batch_size):
        stop = start + n_batch_size
        yield x_data[start:stop], y_data[start:stop]


def run_on_data(model, label, x_data, y_data, n_batch_size, is_train, epoch):
    """Run one pass over (x_data, y_data) in mini-batches.

    Logs cost/metrics per batch; when is_train is true, a backward pass
    and a gradient step follow every batch.  Returns the batch-averaged
    cost, the batch-averaged metrics array, and all predictions
    concatenated and reshaped to match y_data.
    """
    n_batch = int(np.ceil(len(x_data) / n_batch_size))
    cost_avg = 0.
    metrics_avg = None
    pred = np.array([])
    loader = dl(x_data, y_data, n_batch_size, is_train)
    for batch_idx, (x_batch, y_batch) in enumerate(loader):
        h = model(x_batch)
        pred = np.append(pred, h)
        cost = model.cost(h, y_batch)
        metrics = model.metrics(y_batch, h)
        print(label, 'epoch:', epoch, 'batch:', batch_idx, 'cost:', cost, 'metrics:', metrics)
        # Running averages: every batch contributes an equal 1/n_batch share.
        cost_avg += cost / n_batch
        if metrics_avg is None:
            metrics_avg = np.zeros_like(metrics, dtype=np.float32)
        metrics_avg += metrics / n_batch
        if is_train:
            model.backward(y_batch)
            model.step()
    print(label, 'epoch:', epoch, 'cost_avg:', cost_avg, 'metrics:', metrics_avg)
    return cost_avg, metrics_avg, pred.reshape(*y_data.shape)


if '__main__' == __name__:
    import matplotlib.pyplot as plt
    import os
    from python_ai.common.xcommon import *
    from python_ai.category.data.mnist_bin_dataset import x_train, x_test, x_val, y_train, y_test, y_val, shape_

    np.random.seed(1)

    VER = 'v1.0'
    N_BATCH_SIZE = 256
    LR = 1e-1  # learning rate (It should be higher for all sigmoid layers than all relu ones)
    N_EPOCH = 700  # Please to be greater than 650 for good model
    BASE_DIR, FILE_NAME = os.path.split(__file__)
    SAVE_DIR = os.path.join(BASE_DIR, '_save', FILE_NAME, VER)
    os.makedirs(SAVE_DIR, exist_ok=True)
    SAVE_PATH_PREFIX = os.path.join(SAVE_DIR, f'MPLSigmoid_mnist_5_bin_{N_EPOCH}')
    # np.save appends '.npy' to the prefix it is given, producing SAVE_PATH.
    SAVE_PATH = f'{SAVE_PATH_PREFIX}.npy'

    sep('Model')
    model = MLPSigmoid(x_test.shape[1], [128, 64, 32, 1], LR)

    is_train = False
    if os.path.exists(SAVE_PATH):
        sep('Load weight')
        print(f'Loading from {SAVE_PATH} ...')
        model.load_weights(SAVE_PATH)
        print('Loaded.')
    else:
        sep('Train')
        is_train = True

        cost_his, metrics_his = [], []
        cost_his_val, metrics_his_val = [], []
        for i in range(N_EPOCH):
            # BUG FIX: train on the training split. The original trained on
            # x_test/y_test, leaking the test set into training and leaving
            # the imported x_train/y_train unused.
            cost_avg, metrics_avg, *_ = run_on_data(model, 'train', x_train, y_train, N_BATCH_SIZE, True, i)
            cost_his.append(cost_avg)
            metrics_his.append(metrics_avg.tolist())
            # BUG FIX: validate on the held-out validation split (was
            # x_test/y_test, which made "val" identical to the test run).
            cost_avg, metrics_avg, *_ = run_on_data(model, 'val', x_val, y_val, N_BATCH_SIZE, False, i)
            cost_his_val.append(cost_avg)
            metrics_his_val.append(metrics_avg.tolist())
        metrics_his = np.array(metrics_his)
        metrics_his_val = np.array(metrics_his_val)

        print(f'Saving to {SAVE_PATH} ...')
        # Pass the prefix: np.save adds the '.npy' suffix itself.
        model.save_weights(SAVE_PATH_PREFIX)
        print('Saved.')

    sep('Test')
    cost_avg, metrics_avg, pred = run_on_data(model, 'test', x_test, y_test, N_BATCH_SIZE, False, 0)

    # Show the first 16 test digits with true label => predicted label (prob).
    plt.figure(figsize=[6, 6])
    spr = 4
    spc = 4
    spn = 0
    m = spr * spc
    for i in range(m):
        spn += 1
        plt.subplot(spr, spc, spn)
        y = y_test[i, 0]
        img = x_test[i].reshape(*shape_)
        h = pred[i, 0]
        plt.axis('off')
        plt.title(f'{y}=>{int(h>0.5)}({h:.2f})')
        plt.imshow(img)
    print('Please check and close the plotting window to continue ...')
    plt.show()

    if is_train:
        # Plot training curves: cost on the left, all four metrics on the right.
        plt.figure(figsize=[12, 6])
        spr = 1
        spc = 2
        spn = 0

        spn += 1
        plt.subplot(spr, spc, spn)
        plt.title('Cost')
        plt.plot(cost_his, label='train')
        plt.plot(cost_his_val, label='val')
        plt.legend()
        plt.grid()

        spn += 1
        plt.subplot(spr, spc, spn)
        plt.title('Metrics')
        plt.plot(metrics_his[:, 0], label='acc_train')
        plt.plot(metrics_his_val[:, 0], label='acc_val')
        plt.plot(metrics_his[:, 1], label='recall_train')
        plt.plot(metrics_his_val[:, 1], label='recall_val')
        plt.plot(metrics_his[:, 2], label='precision_train')
        plt.plot(metrics_his_val[:, 2], label='precision_val')
        plt.plot(metrics_his[:, 3], label='f1_train')
        plt.plot(metrics_his_val[:, 3], label='f1_val')
        plt.legend()
        plt.grid()

        plt.show()
