import os
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
import tensorflow as tf

class NN(object):
    """Train an LSTM forecaster on the scenic-spot time series stored in a CSV file."""

    def __init__(self, csv_path='scenic_data.csv'):
        """Load the data set.

        Args:
            csv_path: Path of the CSV file to read. Defaults to
                'scenic_data.csv' in the current working directory,
                preserving the original behaviour.
        """
        self.df = pd.read_csv(csv_path)

    def create_dataset(self, data, n_steps, n_targets=18):
        """Build sliding-window samples for supervised learning.

        Args:
            data: 2-D array-like of shape (timesteps, features).
            n_steps: Window length — number of past rows per sample.
            n_targets: Number of leading columns used as the prediction
                target. Defaults to 18, matching the original hard-coded
                slice, so existing callers are unaffected.

        Returns:
            Tuple (X, y): X has shape (samples, n_steps, features) and
            y has shape (samples, min(n_targets, features)).
        """
        X, y = [], []
        for i in range(len(data) - n_steps):
            X.append(data[i:i + n_steps])
            y.append(data[i + n_steps, :n_targets])
        return np.array(X), np.array(y)

    def get_model(self):
        """Train a stacked LSTM, print the test loss, and save the model next to this file."""
        n_steps = 7  # window length: 7 days of history per sample
        data = self.df.values
        X, y = self.create_dataset(data, n_steps)

        # Chronological 80/20 split — no shuffling, since this is a time series.
        train_size = int(len(X) * 0.8)
        X_train, X_test = X[:train_size], X[train_size:]
        y_train, y_test = y[:train_size], y[train_size:]

        model = tf.keras.models.Sequential()
        model.add(tf.keras.layers.LSTM(50, activation='relu', return_sequences=True,
                                       input_shape=(n_steps, X_train.shape[2])))
        model.add(tf.keras.layers.LSTM(50, activation='relu'))
        # Output width derived from the targets instead of a second hard-coded 18,
        # so the head always matches whatever create_dataset produced.
        model.add(tf.keras.layers.Dense(y_train.shape[1]))
        model.compile(optimizer='adam', loss='mse')
        model.fit(X_train, y_train, epochs=50, validation_data=(X_test, y_test))

        # Evaluate on the held-out tail of the series.
        loss = model.evaluate(X_test, y_test)
        print(f"测试集损失: {loss:}")

        # Save next to this source file so the model is found regardless of
        # the current working directory at run time.
        current_dir = os.path.dirname(os.path.abspath(__file__))
        model_save_path = os.path.join(current_dir, 'my_model.keras')
        tf.keras.models.save_model(model, model_save_path)
        print(f"模型已保存到 {model_save_path}")

if __name__ == '__main__':
    # Script entry point: build the wrapper and run the full train/save pipeline.
    NN().get_model()