import matplotlib.pyplot as plt
import tensorflow as tf
from sklearn import datasets
from sklearn.model_selection import train_test_split
from tensorflow.keras import Model, layers, activations

from notebook.plot_utils import plot_decision_boundary


class MLP(Model):
    """Minimal multi-layer perceptron: one hidden ReLU layer, softmax output.

    Args:
        num_classes: Number of output classes (softmax units).
        hidden_units: Width of the single hidden layer. Previously hard-coded
            to 3; exposed as a parameter with the same default so existing
            callers are unaffected.
    """

    def __init__(self, num_classes=3, hidden_units=3):
        super(MLP, self).__init__()
        self.hidden_layer = layers.Dense(hidden_units, activation=activations.relu)
        self.output_layer = layers.Dense(num_classes, activation=activations.softmax)

    def call(self, inputs, training=None, **kwargs):
        """Forward pass: inputs -> hidden ReLU -> per-class probabilities."""
        x = self.hidden_layer(inputs)
        return self.output_layer(x)


@tf.function
def _train_step(x, y, model, optimizer, loss_object, train_loss, train_accuracy):
    """Run one SGD step on a single batch and fold it into the running metrics.

    Args:
        x: Batch of input features.
        y: Batch of integer class labels.
        model: The Keras model being trained.
        optimizer: Optimizer used to apply the computed gradients.
        loss_object: Callable loss(y_true, y_pred).
        train_loss: Stateful Mean metric accumulating the loss.
        train_accuracy: Stateful accuracy metric accumulating predictions.
    """
    with tf.GradientTape() as tape:
        predictions = model(x, training=True)
        batch_loss = loss_object(y, predictions)
    grads = tape.gradient(batch_loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    # Metrics are stateful accumulators; calling them updates their state.
    train_loss(batch_loss)
    train_accuracy(y, predictions)


@tf.function
def _val_step(x, y, model, val_accuracy):
    """Evaluate one validation batch and accumulate its accuracy in-place."""
    predictions = model(x)
    val_accuracy(y, predictions)


def build_model(train_data, val_data, learning_rate=0.001, epochs=100, print_cost=False):
    """Train a small MLP on ``train_data``, evaluating on ``val_data`` each epoch.

    Args:
        train_data: tf.data.Dataset yielding (features, integer-label) batches.
        val_data: tf.data.Dataset yielding (features, integer-label) batches.
        learning_rate: SGD step size.
        epochs: Number of full passes over ``train_data``.
        print_cost: If True, print loss/accuracy every 10 epochs.

    Returns:
        Tuple ``(model, costs, train_accs, val_accs)`` where the three lists
        hold one metric value per epoch.
    """
    # Per-epoch metric history.
    costs = []
    train_accs = []
    val_accs = []

    model = MLP()
    # Stochastic gradient descent optimizer.
    optimizer = tf.optimizers.SGD(learning_rate)

    loss_object = tf.keras.losses.SparseCategoricalCrossentropy()
    train_loss = tf.keras.metrics.Mean(name='train_loss', dtype=tf.float32)
    train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')
    val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='val_accuracy')

    for i in range(epochs):
        # BUG FIX: Keras metrics are stateful accumulators. Without resetting
        # them here, each "per-epoch" value reported below was actually a
        # running average over ALL epochs seen so far.
        train_loss.reset_state()
        train_accuracy.reset_state()
        val_accuracy.reset_state()

        for x_train, y_train in train_data:
            _train_step(x_train, y_train, model, optimizer, loss_object, train_loss, train_accuracy)

        for x_val, y_val in val_data:
            _val_step(x_val, y_val, model, val_accuracy)

        if print_cost and (i + 1) % 10 == 0:
            template = "Cost after iteration {}: {}, Accuracy on train data: {}, Accuracy on validation data: {}"
            print(template.format(i + 1, train_loss.result(), train_accuracy.result(), val_accuracy.result()))

        costs.append(train_loss.result())
        train_accs.append(train_accuracy.result())
        val_accs.append(val_accuracy.result())

    return model, costs, train_accs, val_accs


def main():
    """Generate a 3-blob toy dataset, train the MLP, and plot its decision boundary."""
    X, y = datasets.make_blobs(n_samples=10000, n_features=2, centers=3, random_state=0)
    # 80% train; the remaining 20% is split evenly into validation and test.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    X_val, X_test, y_val, y_test = train_test_split(X_test, y_test, test_size=0.5, random_state=42)

    train_data = (
        tf.data.Dataset.from_tensor_slices((X_train, y_train))
        .shuffle(8000, reshuffle_each_iteration=True)
        .batch(32)
    )
    val_data = tf.data.Dataset.from_tensor_slices((X_val, y_val)).batch(32)

    model, costs, train_accs, val_accs = build_model(
        train_data, val_data, learning_rate=0.001, epochs=10, print_cost=True)

    print(X_test.shape, y_test.shape)
    # Rearrange into (n_features, n_samples) / (1, n_samples) layout —
    # presumably the layout plot_decision_boundary expects; verify against
    # its definition if the plot looks wrong.
    X_plot = X_test.T
    y_plot = y_test.reshape((1, y_test.shape[0]))
    plot_decision_boundary(lambda x: model(x), X_plot, y_plot)
    plt.show()


if __name__ == '__main__':
    main()
