# -*- encoding: utf-8 -*-
'''
@File    :   ann.py
@Time    :   2022/2/11 17:43
@Author  :   ZhangChaoYang
@Desc    :   基于多层全连接的分类器
'''
import os, sys

sys.path.insert(0, os.getcwd())
import tensorflow as tf
from tensorflow.keras.models import load_model
from tensorflow.keras import Model, Sequential
from tensorflow.keras.layers import ReLU, Dense, Flatten, Dropout, Softmax
from tensorflow.keras.optimizers import Adam
from util.view import draw_train_history, plot_confusion_matrix
from sklearn.metrics import confusion_matrix
from util.work_flow import read_sample, classes
import numpy as np
from sklearn.model_selection import train_test_split
from absl import flags
from absl import app

FLAGS = flags.FLAGS

# Command-line flags for dataset selection and experiment configuration.
# Help strings are user-facing CLI text and are kept as-is.
flags.DEFINE_string('corpus', "jiangnan", '数据集，可选jiangnan、paderborn')
flags.DEFINE_string('data_dim', "1d", '数据集，可选1d、2d')
flags.DEFINE_string('data_trans', "original",
                    '数据集，data_dim=1d时data_trans可选original、fft、stat，data_dim=2d时data_trans可选sfft、cwt')
flags.DEFINE_boolean('multi_class', False,
                     '是否要多分类，即是否对故障进行具体的分类，二分类仅区分出正常样本和故障样本')  # merely passing --multi_class on the command line sets this to True
flags.DEFINE_string('wc_4_train', "", '用于训练模型的工况列表（用逗号分隔）')
flags.DEFINE_string('wc_4_valid', "", '用于测试模型的工况列表（用逗号分隔）')


class ANN(Model):
    """Multi-layer fully-connected classifier.

    For ``class_num > 2`` the network ends with a Softmax layer and outputs
    class probabilities; otherwise it ends with a single linear unit and
    outputs a raw logit (no sigmoid is applied).
    """

    def __init__(self, hidden_sizes, class_num):
        """Build the stack of Dense/ReLU/Dropout layers plus the output head.

        Args:
            hidden_sizes: iterable of hidden-layer widths, applied in order.
            class_num: number of classes; <= 2 selects the binary (1-logit) head.
        """
        super(ANN, self).__init__()
        self.class_num = class_num
        layers = [Flatten(name="flatten")]
        for i, hidden_size in enumerate(hidden_sizes):
            layers.append(Dense(hidden_size, name="fc{}".format(i)))
            layers.append(ReLU(name="active{}".format(i)))
            layers.append(Dropout(0.5, name="dropout{}".format(i)))
        if class_num > 2:
            layers.append(Dense(class_num, name="fc{}".format(len(hidden_sizes))))
            layers.append(Softmax(name="class_softmax"))
        else:
            layers.append(Dense(1, name="fc{}".format(len(hidden_sizes))))
        self.model = Sequential(layers)

    def call(self, inputs, training=None, mask=None):
        # Forward `training` explicitly so Dropout is active only during fit()
        # and disabled at inference time.
        return self.model(inputs, training=training)

    def train(self, x_train, y_train, x_test, y_test, learning_rate, epochs, chart_file=""):
        """Compile with a loss matching the output head, fit, and plot history.

        Bug fix: the original used tf.nn.softmax_cross_entropy_with_logits for
        the multi-class case although the network already ends with a Softmax
        layer, so softmax was applied twice and gradients were distorted.  The
        losses below match what each head actually emits.
        """
        optim = Adam(learning_rate=learning_rate)
        if self.class_num > 2:
            # Head emits probabilities (Softmax layer) -> from_logits=False.
            loss = tf.keras.losses.CategoricalCrossentropy(from_logits=False)
        else:
            # Head emits a raw logit (no sigmoid) -> from_logits=True.
            loss = tf.keras.losses.BinaryCrossentropy(from_logits=True)
        self.compile(optim, loss=loss)
        history = self.fit(x=x_train, y=y_train, validation_data=(x_test, y_test), epochs=epochs, shuffle=True)
        draw_train_history(history, chart_file=chart_file, begin_epoch=0)


def _load_working_conditions(wc_list, corpus, data_dim, trans, multi_class):
    """Read and vertically stack samples of every non-empty working condition.

    Returns (x, y) as stacked numpy arrays, or (None, None) when wc_list
    contains no usable entries.
    """
    xs, ys = None, None
    for wc in wc_list:
        if len(wc.strip()) == 0:
            continue
        xarray, yarray = read_sample(os.path.join("corpus", "multi_class", data_dim, corpus, trans, wc), multi_class)
        xs = xarray if xs is None else np.vstack([xs, xarray])
        # NOTE(review): assumes yarray stacks correctly with vstack (2-D rows) — confirm against read_sample.
        ys = yarray if ys is None else np.vstack([ys, yarray])
    return xs, ys


def diagnose(corpus, train_wc, test_wc, multi_class, data_dim, trans, epochs=100):
    """Train, save, reload and evaluate an ANN-based fault classifier.

    Args:
        corpus: dataset name ("jiangnan" or "paderborn").
        train_wc: list of working-condition names used for training.
        test_wc: list of working-condition names used for validation; when it
            yields no samples, a random split of the training data is used.
        multi_class: True for per-fault-type classification, False for
            normal-vs-fault binary classification.
        data_dim: "1d" or "2d" sample layout.
        trans: preprocessing transform name (e.g. "original", "fft").
        epochs: number of training epochs.
    """
    class_num = 4  # number of fault categories in the multi-class setting
    learning_rate = 1e-3
    valid_ratio = 0.2  # hold-out fraction when no explicit test set is given
    model_name = "ann"

    model_dir = os.path.join("data", "model", data_dim, corpus, trans, model_name,
                             "multi_class" if multi_class else "binary_class")
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)

    model = ANN(hidden_sizes=[16, 8], class_num=class_num if multi_class else 2)

    # The train and test sets are loaded the same way; share one helper
    # instead of two copy-pasted accumulation loops.
    x_train, y_train = _load_working_conditions(train_wc, corpus, data_dim, trans, multi_class)
    x_test, y_test = _load_working_conditions(test_wc, corpus, data_dim, trans, multi_class)

    model_file_name = "-".join(train_wc)
    if x_test is None:  # test_wc is empty: hold out part of the training data instead
        model_file_name = "all"
        x_train, x_test, y_train, y_test = train_test_split(x_train, y_train, test_size=valid_ratio, random_state=42)

    train_file = os.path.join(model_dir, model_file_name + "_train_history.png")
    model.train(x_train, y_train, x_test, y_test, learning_rate, epochs, train_file)
    model_file = os.path.join(model_dir, model_file_name)
    model.save(model_file)

    # Reload from disk so the evaluation also verifies the saved artifact.
    model = load_model(model_file, compile=False)
    y_pred = model(x_test)
    verify_file = os.path.join(model_dir, model_file_name + "_verify.png")

    if multi_class:
        # y_test rows are one-hot vectors; argmax recovers the class index for
        # both the labels and the predicted score vectors.
        y = np.argmax(y_test, axis=1)
        y_pred = np.argmax(y_pred, axis=1)
        cnf_matrix = confusion_matrix(y, y_pred)
        plot_confusion_matrix(cnf_matrix, classes, normalize=True, chart_file=verify_file)
    else:
        # Bug fix: the binary head outputs a raw logit, not a probability.
        # logit > 0  <=>  sigmoid(logit) > 0.5, so threshold at 0 (the
        # original compared the logit itself against 0.5).
        y_pred = np.where(np.asarray(y_pred) > 0, 1, 0)
        cnf_matrix = confusion_matrix(y_test, y_pred)
        plot_confusion_matrix(cnf_matrix, ["anomly", "normal"], normalize=True, chart_file=verify_file)


def main(argv):
    """absl entry point: read parsed flags and launch one diagnosis run."""
    multi_class = FLAGS.multi_class
    print(multi_class)
    diagnose(FLAGS.corpus,
             train_wc=FLAGS.wc_4_train.split(","),
             test_wc=FLAGS.wc_4_valid.split(","),
             multi_class=multi_class,
             data_dim=FLAGS.data_dim,
             trans=FLAGS.data_trans)


if __name__ == '__main__':
    app.run(main)  # absl parses the flags defined above, then invokes main(argv)

# Example invocations:
# python fault_classification/ann.py --corpus jiangnan --data_dim 1d --data_trans original  --wc_4_train 600,800,1000
# python fault_classification/ann.py --corpus jiangnan --data_dim 1d --data_trans original --multi_class  --wc_4_train 600,800,1000
# python fault_classification/ann.py --corpus jiangnan --data_dim 1d --data_trans original --multi_class  --wc_4_train 600,1000  --wc_4_valid 800
