import json

import pandas as pd
import numpy as np
import tensorflow as tf
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt

from NeuralNetworkTools import NeuralNetworkTools


class ModelTrain:
    """Train an LSTM that forecasts ``out_step`` future samples of six
    water-quality indicators from the previous ``step`` samples.

    Workflow: :meth:`get_train_data` loads and normalises the CSV and builds
    supervised (X, Y) windows; :meth:`train` fits the network, checkpoints the
    best model to ``modelFilePath`` and plots the learning curves.
    """

    def __init__(self):
        # Number of past time steps fed to the network (input window length).
        self.step = 24
        # Number of features (water-quality indicators) per time step.
        self.features = 6
        # Number of future time steps to predict (output window length).
        self.out_step = 24
        # Monitoring-station code; also names the de-normalisation config file.
        self.MN = 'A230700_2003'
        # CSV file holding the training data for this station.
        self.trainFilePath = './train_files/A230700_2003.csv'
        # Where the trained Keras model is saved by the checkpoint callback.
        self.modelFilePath = './model/A230700_2003.keras'
        # CSV column indices that hold the six feature values.
        self.columnIndex = [2, 3, 4, 5, 6, 7]
        # Feature names used as keys in the config JSON. These are runtime
        # values (written to disk), so they are kept exactly as-is:
        # PH, dissolved oxygen, permanganate index, ammonia nitrogen,
        # total phosphorus, total nitrogen.
        self.columnName = ['PH', '溶解氧', '高锰酸盐指数', '氨氮', '总磷', '总氮']
        # NOTE(review): unused in this file — possibly read by external code.
        self.f_index = 0
        # Fraction of samples used for training; the rest is validation.
        self.splitSize = 0.9
        self.trainX = []
        self.trainY = []
        self.testX = []
        self.testY = []
        self.X = []
        self.Y = []

    def drawing_learning_curve(self, history):
        """Plot training vs. validation loss over epochs from a Keras
        ``History`` object (blocks until the window is closed)."""
        loss = history.history['loss']
        val_loss = history.history['val_loss']
        epochs = range(1, len(loss) + 1)
        plt.plot(epochs, loss, 'r', label='Training loss')
        plt.plot(epochs, val_loss, 'b', label='Validation loss')
        plt.title('Training and validation loss')
        plt.xlabel('Epochs')
        plt.ylabel('Loss')
        plt.legend()
        plt.show()

    def get_train_data(self):
        """Load the CSV, normalise features to [0, 1], reframe the series as
        supervised windows and populate trainX/trainY/testX/testY.

        Side effect: writes per-feature (min, max) to ``config/{MN}.json`` so
        predictions can be de-normalised later.
        """
        neural_network_tools = NeuralNetworkTools()
        dataset1 = pd.read_csv(self.trainFilePath, header=0, usecols=self.columnIndex)
        neural_network_tools.fillNaN(dataset1)
        # Normalise every feature to the 0..1 range.
        scaler = MinMaxScaler(feature_range=(0, 1))
        data1 = scaler.fit_transform(dataset1.values)
        # Persist per-feature min/max for later de-normalisation. Use the
        # scaler's own fitted statistics (identical to np.min/np.max of the
        # raw data, but a single source of truth) and cast to plain float:
        # numpy integer scalars are not JSON-serialisable and would raise
        # TypeError from json.dump for integer-typed columns.
        config_json = {
            name: [float(lo), float(hi)]
            for name, lo, hi in zip(self.columnName, scaler.data_min_, scaler.data_max_)
        }
        # Open the file for writing; created if it does not exist.
        with open(f'config/{self.MN}.json', 'w') as file:
            json.dump(config_json, file)
        # Reframe as supervised learning: `step` input rows -> `out_step`
        # target rows per sample.
        data1 = neural_network_tools.series_to_supervised(
            n_data=data1, n_in=self.step, n_out=self.out_step, dropnan=True)
        data1 = np.asarray(data1).astype(float)

        # First step*features columns are inputs; the remainder are targets.
        self.X = data1[:, :self.features * self.step]
        self.Y = data1[:, self.features * self.step:]

        # Chronological split (no shuffling — order matters for time series).
        train_size = int(self.X.shape[0] * self.splitSize)
        self.trainX = self.X[:train_size]
        self.testX = self.X[train_size:]
        self.trainY = self.Y[:train_size]
        self.testY = self.Y[train_size:]

        # Keras LSTM expects input shaped (samples, time steps, features).
        self.trainX = np.reshape(self.trainX, (self.trainX.shape[0], self.step, self.features))
        self.testX = np.reshape(self.testX, (self.testX.shape[0], self.step, self.features))

    def train(self):
        """Build and fit the LSTM, checkpoint the best model, plot losses."""
        self.get_train_data()
        print(self.trainX.shape)
        print(self.trainY.shape)
        # Single LSTM layer (120 hidden units) followed by a dense layer that
        # emits the full flattened forecast: out_step * features values.
        model = tf.keras.Sequential()
        model.add(tf.keras.layers.LSTM(120,
                                       input_shape=(self.trainX.shape[1], self.trainX.shape[2]),
                                       return_sequences=False))
        model.add(tf.keras.layers.Dense(self.out_step * self.features))
        optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)
        model.compile(optimizer=optimizer, loss='mean_squared_error')
        # BUG FIX: save_best_only was the string 'True', which is only truthy
        # by accident — pass the boolean. NOTE(review): monitor='loss' saves
        # the model with the best *training* loss even though validation data
        # is available; 'val_loss' may be intended — confirm before changing.
        checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath=self.modelFilePath,
                                                        monitor='loss',
                                                        mode='auto',
                                                        save_best_only=True,
                                                        save_weights_only=False)

        callback_lists = [checkpoint]

        history = model.fit(self.trainX, self.trainY,
                            validation_data=(self.testX, self.testY),
                            epochs=200,
                            batch_size=100,
                            verbose=2, callbacks=callback_lists)
        self.drawing_learning_curve(history)


if __name__ == '__main__':
    # Script entry point: build a trainer and run the full training pipeline.
    ModelTrain().train()
