import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
import tensorflow as tf

class NN(object):
    """Train an LSTM regressor that predicts a scenic spot's daily
    visitor count from a sliding window of recent rows."""

    def __init__(self):
        # NOTE(review): path is relative to the CWD — confirm the script
        # is always launched from the project root.
        self.df = pd.read_csv('./7/scenic_data.csv')

    def create_dataset(self, df, n_strps):
        """Build supervised (X, y) windows from *df*.

        Each sample is ``n_strps`` consecutive rows. The 'count' value of
        the window's last row is masked to 0 in the features and used as
        the regression target.

        Parameters
        ----------
        df : pandas.DataFrame
            Input frame; must contain a 'count' column.
        n_strps : int
            Window length in rows (days).

        Returns
        -------
        tuple[numpy.ndarray, numpy.ndarray]
            X of shape (samples, n_strps, n_features) and y of shape
            (samples,).
        """
        X, y = [], []
        # Hoist the column lookup out of the loop — it is loop-invariant.
        count_col = df.columns.get_loc('count')
        for i in range(len(df) - n_strps):
            # Copy explicitly: the original built pd.DataFrame(slice)
            # without copying, so zeroing the last 'count' could write
            # through to *df* (pre-CoW pandas), corrupting both the
            # targets and every later window.
            window = df.iloc[i:i + n_strps].copy()
            # Read the target BEFORE masking it out of the features.
            y.append(window.iloc[-1, count_col])
            window.iloc[-1, count_col] = 0
            X.append(window.to_numpy())
        return np.array(X), np.array(y)

    def get_model(self, n_steps=7):
        """Build, train, evaluate, and save the LSTM model.

        Parameters
        ----------
        n_steps : int, default 7
            Sliding-window length in days (previously hard-coded).
        """
        X, y = self.create_dataset(self.df, n_steps)

        # Chronological 80/20 split — no shuffling for time-series data.
        train_size = int(len(X) * 0.8)
        X_train, X_test = X[:train_size], X[train_size:]
        y_train, y_test = y[:train_size], y[train_size:]

        model = tf.keras.models.Sequential()
        model.add(tf.keras.layers.LSTM(50, activation='relu'))
        model.add(tf.keras.layers.Dense(1))
        model.compile(optimizer='adam', loss='mse')
        model.fit(X_train, y_train, epochs=50,
                  validation_data=(X_test, y_test))

        # Evaluate on the held-out tail of the series.
        loss = model.evaluate(X_test, y_test)
        print(f"测试集损失：{loss:}")

        # Persist in the native Keras format.
        tf.keras.models.save_model(model, '7/my_model.keras')
        
if __name__ == '__main__':
    # Script entry point: load the data and run the full training pipeline.
    NN().get_model()













# if __name__ == '__main__':
#     # wu =WeatherUtils()
#     # wu.get_data()
#     mu = MysqlUtils()
    
    
    
    
    
    
    