import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
import tensorflow as tf

class NN(object):
    """Train a two-layer LSTM forecaster on the scenic-spot daily CSV data."""

    def __init__(self):
        # Raw daily observations; each row is one day, columns are features.
        self.df = pd.read_csv('./NN/scenic_data.csv')

    def create_dataset(self, data, n_steps):
        """Build sliding-window supervised samples from a 2-D time series.

        Args:
            data: 2-D array-like of shape (days, features).
            n_steps: window length (number of past days per sample).

        Returns:
            X: ndarray of shape (samples, n_steps, features) — input windows.
            Y: ndarray of the first (up to) 18 columns of the day that
               immediately follows each window.
        """
        X, Y = [], []
        for i in range(len(data) - n_steps):
            X.append(data[i:i + n_steps])
            # Target is the next day's first 18 columns (the forecast outputs).
            Y.append(data[i + n_steps, :18])

        return np.array(X), np.array(Y)

    def get_model(self):
        """Build, train, evaluate, and save the LSTM model."""
        n_steps = 7  # window length: 7 days
        data = self.df.values
        X, Y = self.create_dataset(data, n_steps)

        # Chronological 80/20 split — no shuffling for time-series data.
        train_size = int(len(X) * 0.8)
        X_train, X_test = X[:train_size], X[train_size:]
        Y_train, Y_test = Y[:train_size], Y[train_size:]

        # Build the model.
        # FIX: original used `tf.Keras.model.Sequentail` / `tf.Keras.layers` /
        # `tf.layers.Dense`, none of which exist — the correct namespace is
        # `tf.keras` (lowercase).
        model = tf.keras.models.Sequential()
        model.add(tf.keras.layers.LSTM(50, activation='relu',
                                       return_sequences=True,
                                       input_shape=(n_steps, X_train.shape[2])))
        model.add(tf.keras.layers.LSTM(50, activation='relu'))
        model.add(tf.keras.layers.Dense(18))
        # FIX: `model.comile` → `model.compile`.
        model.compile(optimizer='adam', loss='mse')
        model.fit(X_train, Y_train, epochs=50,
                  validation_data=(X_test, Y_test))

        # Evaluate on the held-out tail of the series.
        loss = model.evaluate(X_test, Y_test)
        print(f"测试集损失:{loss:}")

        # Save the model.
        # FIX: `tf.Keras.model.save_model` does not exist; use `model.save`.
        # Keras requires the native-format extension to be lowercase `.keras`.
        model.save('NN/my_model.keras')
        
if __name__ == '__main__':
    # Script entry point: build and train the model end to end.
    NN().get_model()
