import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
import tensorflow as tf


class NN(object):
    """Train an LSTM regressor on a daily 'count' time series loaded from CSV."""

    def __init__(self, csv_path='./timing/scenic_data.csv'):
        """Load the dataset.

        Args:
            csv_path: path to the CSV file. Defaults to the original
                hard-coded location, so existing callers are unaffected.
        """
        self.df = pd.read_csv(csv_path)

    def create_dataset(self, df, n_steps):
        """Build sliding windows of length ``n_steps`` and their targets.

        Each window covers rows ``i .. i+n_steps-1``. The last row's 'count'
        is masked to 0 inside the window (it is the value being predicted),
        and the target ``y`` is that same row's true 'count'.

        Args:
            df: DataFrame with a numeric 'count' column (default RangeIndex
                assumed — TODO confirm against the CSV).
            n_steps: window length.

        Returns:
            Tuple ``(X, y)`` where X has shape
            ``(len(df) - n_steps, n_steps, n_columns)`` and y has shape
            ``(len(df) - n_steps,)``.
        """
        X, y = [], []
        count_idx = df.columns.get_loc('count')  # hoisted: column position is loop-invariant
        for i in range(len(df) - n_steps):
            # BUGFIX: the original sliced without copying; pd.DataFrame(slice)
            # does not copy by default, so zeroing the last row could write
            # through to the caller's df and corrupt later windows/targets.
            window = df.iloc[i:i + n_steps].copy()
            window.iloc[-1, count_idx] = 0  # hide the target value from the input
            X.append(window.to_numpy())
            # True count of the masked (last) day; .iloc avoids chained indexing.
            y.append(df['count'].iloc[i + n_steps - 1])

        return np.array(X), np.array(y)

    def get_model(self, n_steps=7):
        """Train a two-layer LSTM, evaluate it, and save it to disk.

        Args:
            n_steps: window length in days (default 7, matching the
                original hard-coded value).
        """
        X, y = self.create_dataset(self.df, n_steps)

        # Chronological 80/20 split — no shuffling for time-series data.
        train_size = int(len(X) * 0.8)
        X_train, X_test = X[:train_size], X[train_size:]
        y_train, y_test = y[:train_size], y[train_size:]

        # Build the model: stacked LSTMs feeding a single-unit regression head.
        model = tf.keras.models.Sequential()
        model.add(tf.keras.layers.LSTM(50, activation='relu', return_sequences=True,
                                       input_shape=(n_steps, X_train.shape[2])))
        model.add(tf.keras.layers.LSTM(50, activation='relu'))
        model.add(tf.keras.layers.Dense(1))
        model.compile(optimizer='adam', loss='mse')
        model.fit(X_train, y_train, epochs=50, validation_data=(X_test, y_test))

        # Evaluate on the held-out split.
        loss = model.evaluate(X_test, y_test)
        print(f"测试集损失：{loss:}")

        # Persist the trained model.
        tf.keras.models.save_model(model, 'timing/my_model.keras')

# Script entry point: load the data, then train and persist the model.
if __name__ == '__main__':
    NN().get_model()