from matplotlib import pyplot as plt

from data import DataLoader
import tensorflow as tf
from numpy import genfromtxt

from model import MLP
from metrics import Metrics


import numpy

if __name__ == '__main__':
    # Training hyperparameters.
    num_epochs = 1000
    batch_size = 514  # NOTE(review): unusual size — presumably meant 512? confirm
    learning_rate = 0.0001

    model = MLP()
    data_loader = DataLoader()
    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)

    # Total number of gradient steps across all epochs.
    num_batches = int(data_loader.num_train_data // batch_size * num_epochs)

    # Per-batch histories used for the curves plotted below.
    loss_list = []
    acc_list = []
    spe_list = []
    sen_list = []

    for batch_index in range(num_batches):
        X, y = data_loader.get_batch(batch_size)
        with tf.GradientTape() as tape:
            # Constant offset on the class-1 score shifts the decision
            # threshold (NOTE(review): magic constant 0.11 — presumably
            # tuned for class imbalance; confirm).
            y_pred = model(X) + numpy.array([0, 0.11])  # [batch, 2]

            # Hard class predictions for the per-batch metrics.
            y_pred_ = tf.argmax(y_pred, axis=1)  # [batch] int64

            # sparse_categorical_crossentropy expects the raw per-class
            # scores, not the argmax.
            loss = tf.keras.losses.sparse_categorical_crossentropy(y_true=y, y_pred=y_pred)
            loss = tf.reduce_mean(loss)

            loss_list.append(float(loss))  # for plotting
            print("batch %d: loss %f " % (batch_index, loss.numpy()))
            train_metrics = Metrics(y, y_pred_)
            train_metrics.print_metrics()
            acc_list.append(train_metrics.accuracy())
            spe_list.append(train_metrics.specificity())
            sen_list.append(train_metrics.sensitivity())

        grads = tape.gradient(loss, model.variables)
        optimizer.apply_gradients(grads_and_vars=zip(grads, model.variables))

    # ---- Evaluation on the held-out test set ----
    # Apply the same class-1 score offset used during training.
    y_pred_test = tf.argmax(model.predict(data_loader.test_data) + numpy.array([0, 0.11]), axis=1)
    print(y_pred_test)
    y_test = data_loader.test_label
    print(y_test)
    test_metrics = Metrics(y_test, y_pred_test)
    # Fixed: previously re-printed train_metrics here by mistake.
    test_metrics.print_metrics()

    # ---- Training curves (loss / accuracy / specificity / sensitivity) ----
    batches = list(range(1, num_batches + 1))
    plt.plot(batches, loss_list, 'r')
    plt.plot(batches, acc_list, 'b')
    plt.plot(batches, spe_list, 'g')
    plt.plot(batches, sen_list, 'm')  # fixed: 'p' is not a valid matplotlib color
    plt.title('Training loss and Training Acc')
    plt.xlabel("Batches")  # fixed: x-axis is the batch index, not epochs
    plt.ylabel("Loss")
    plt.legend(["Loss", "Acc", "Spe", "Sen"])
    # Fixed: removed stray plt.figure() that opened a second, empty window.
    plt.show()
