import numpy as np
from constant import Constant
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
import os
import matplotlib.pyplot as plt
import re
from tqdm import tqdm


class DatasetUtils:
    """Helpers that turn the raw flow/weather CSV into windowed train/test
    arrays, plus one-off routines that enrich the raw CSV with date and
    weather covariates (each rewrites ``Constant.RAW_DATA_PATH`` in place).
    """

    @staticmethod
    def generate_dataset():
        """Window the raw sequence data and save train/test splits.

        Reads ``Constant.RAW_DATA_PATH`` to obtain the encoder input, the
        target and the lagged decoder input, scales each to [-1, 1], splits
        80/20 along the time axis, windows each split with
        :meth:`generate_samples`, and saves six ``.npy`` files under
        ``Constant.TRAIN_PATH`` / ``Constant.TEST_PATH``.
        """
        features_index = Constant.FEATURES_CHOOSE
        # NOTE: with `usecols`, pandas keeps the CSV's own column order, not
        # the order of the list passed in — the feature axis follows the
        # file layout.
        data_cols = Constant.TARGET + Constant.FEATURES_MAP[features_index]
        print(f'data cols is {data_cols}')
        data = pd.read_csv(Constant.RAW_DATA_PATH, usecols=data_cols)

        target_cols = Constant.TARGET
        print(f'target cols is {target_cols}')
        target = pd.read_csv(Constant.RAW_DATA_PATH, usecols=target_cols)

        tgt_in_cols = Constant.TARGET_LAG + Constant.FEATURES_MAP[features_index]
        print(f'tgt_in cols is {tgt_in_cols}')
        tgt_in = pd.read_csv(Constant.RAW_DATA_PATH, usecols=tgt_in_cols)

        data = data.to_numpy()
        target = target.to_numpy()
        tgt_in = tgt_in.to_numpy()

        # Normalise every column to [-1, 1].
        # NOTE(review): each scaler is fitted on the FULL series before the
        # train/test split below, so test-set statistics leak into the
        # normalisation — confirm this is intentional.
        data = MinMaxScaler(feature_range=(-1, 1)).fit_transform(data)
        target = MinMaxScaler(feature_range=(-1, 1)).fit_transform(target)
        tgt_in = MinMaxScaler(feature_range=(-1, 1)).fit_transform(tgt_in)

        # Chronological 80/20 train/test split (no shuffling — time series).
        div = int(data.shape[0] * 0.8)
        train_data = data[:div]
        test_data = data[div:]

        train_tgt = target[:div]
        test_tgt = target[div:]

        train_tgt_in = tgt_in[:div]
        test_tgt_in = tgt_in[div:]

        train_data, train_label, train_tgt_in = DatasetUtils.generate_samples(train_data, train_tgt, train_tgt_in)
        test_data, test_label, test_tgt_in = DatasetUtils.generate_samples(test_data, test_tgt, test_tgt_in)
        print(f'train_data shape:{train_data.shape}, train_label shape:{train_label.shape}, tgt:{train_tgt_in.shape}')

        print(f'共有训练集：{train_data.shape[0]}条，测试集{test_data.shape[0]}条')
        # Persist all six arrays; file names are fixed by convention and
        # consumed elsewhere in the project.
        for directory, file_name, array in (
            (Constant.TRAIN_PATH, 'train_data.npy', train_data),
            (Constant.TRAIN_PATH, 'train_target.npy', train_label),
            (Constant.TRAIN_PATH, 'train_tgt_in.npy', train_tgt_in),
            (Constant.TEST_PATH, 'test_data.npy', test_data),
            (Constant.TEST_PATH, 'test_target.npy', test_label),
            (Constant.TEST_PATH, 'test_tgt_in.npy', test_tgt_in),
        ):
            np.save(os.path.join(directory, file_name), array)

    @staticmethod
    def generate_samples(raw_data: np.ndarray, target: np.ndarray, raw_tgt_in: np.ndarray):
        """Generate samples with a sliding window.

        :param raw_data: raw model input, containing the flow series to be
            predicted plus covariates such as weather; shape [rows, features]
        :param target: label data, only the flow series to predict;
            shape [rows, 1]
        :param raw_tgt_in: decoder (tgt) input data; shape [rows, features]
        :return: numpy arrays — data of shape
            [n_samples, window_length, features], label of shape
            [n_samples, window_length, 1], tgt_in like data
        """
        res_data = []
        res_label = []
        res_tgt_in = []
        length = raw_data.shape[0]
        seen = Constant.INPUT_WINDOW
        blind = Constant.OUTPUT_WINDOW
        # The last valid start index is length - seen - blind inclusive: the
        # label slice target[i + blind : i + seen + blind] then ends exactly
        # at `length`.  The original `range(length - seen - blind)` dropped
        # this final sample (off-by-one).
        for i in range(length - seen - blind + 1):
            data = raw_data[i:i + seen]
            # Labels are the same-length window shifted forward by `blind`.
            label = target[i + blind:i + seen + blind]
            tgt_in = raw_tgt_in[i:i + seen]
            res_data.append(data)
            res_label.append(label)
            res_tgt_in.append(tgt_in)
        return np.array(res_data), np.array(res_label), np.array(res_tgt_in)

    @staticmethod
    def add_date():
        """Add calendar components of the date column as (scaled) features.

        Each component is divided by its (approximate) maximum so the new
        columns lie roughly in [0, 1].  NOTE(review): day 366 of a leap year
        and ISO week 53 can push values slightly above 1 — confirm this is
        acceptable downstream.
        """
        data = pd.read_csv(Constant.RAW_DATA_PATH)
        data['date'] = pd.to_datetime(data['date'], format="%Y-%m-%d")

        data['day_of_week'] = data['date'].dt.dayofweek / 6
        data['day_of_month'] = data['date'].dt.day / 31
        data['day_of_year'] = data['date'].dt.dayofyear / 365
        data['week_of_year'] = data['date'].dt.isocalendar().week / 53
        data['month'] = data['date'].dt.month / 12
        # Year scaled relative to the 2018–2022 span of the dataset.
        data['year'] = (data['date'].dt.year - 2018) / (2022 - 2018)
        data.to_csv(Constant.RAW_DATA_PATH, index=False, encoding='utf_8_sig')

    @staticmethod
    def create_lag_data():
        """Create 1-step lagged columns used as the decoder's tgt_in input.

        The first row has no predecessor and is filled with 0.
        """
        data = pd.read_csv(Constant.RAW_DATA_PATH)
        data['one_lag_1'] = data['one'].shift(1, fill_value=0)
        data['total_lag_1'] = data['total'].shift(1, fill_value=0)
        data.to_csv(Constant.RAW_DATA_PATH, index=False, encoding='utf_8_sig')

    @staticmethod
    def weather_change_format():
        """Reformat the crawler's raw weather CSV into tidy columns.

        NOTE(review): assumes every `weather` and `t` cell contains exactly
        one '/' separator (e.g. "晴/多云", "3℃/12℃") — rows without it will
        raise IndexError; verify against the crawler output.
        """
        weather_raw = pd.read_csv(Constant.DATA_PATH + 'weather_raw.csv', encoding='GBK')
        weather_column = ["date", "weather1", "weather2", "temperature_low", "temperature_high", "wind"]
        weather = pd.DataFrame(columns=weather_column)
        weather['date'] = weather_raw['date']
        for i, row in tqdm(enumerate(weather_raw.itertuples())):
            # "cond1/cond2" -> two weather columns.
            w = row.weather.replace(" ", "").split('/')
            weather.at[i, 'weather1'] = w[0]
            weather.at[i, 'weather2'] = w[1]
            # "low℃/high℃" -> strip the trailing unit character.
            t = row.t.replace(" ", "").split('/')
            weather.at[i, 'temperature_low'] = t[0][:-1]
            weather.at[i, 'temperature_high'] = t[1][:-1]
            # Wind is given as a numeric range (e.g. "3-4级"); keep the mean.
            wind = re.findall(r'\d+', row.wind)
            wind = np.array(wind, dtype=int)
            weather.at[i, 'wind'] = wind.mean()
        weather.to_csv(Constant.WEATHER_DATA_PATH, index=False, encoding='GBK')

    @staticmethod
    def add_covariance():
        """Merge the crawled weather covariates into the raw flow data."""
        data_flow = pd.read_csv(Constant.RAW_DATA_PATH)
        data_flow['date'] = pd.to_datetime(data_flow['date'])
        # Assumes a default RangeIndex; first/last rows give the date span.
        start, end = data_flow.date[0], data_flow.date[data_flow.shape[0] - 1]
        # Dropped the original `date_parser=True`: the parameter expects a
        # callable (it was ignored without `parse_dates` and removed in
        # pandas 2.0); dates are parsed explicitly on the next line anyway.
        data_weather = pd.read_csv(Constant.WEATHER_DATA_PATH, encoding='GBK')
        data_weather['date'] = pd.to_datetime(data_weather['date'])
        data_weather = data_weather[(data_weather.date >= start) & (data_weather.date <= end)]
        data_weather = data_weather.drop_duplicates(subset=['date'])  # dedupe: crawled data is not clean
        data = pd.merge(data_flow, data_weather, how='inner', on='date')
        print(data)
        data.to_csv(Constant.RAW_DATA_PATH, index=False, encoding='utf_8_sig')

    @staticmethod
    def add_weather():
        """Add the mean of the two weather-condition columns as a feature.

        NOTE(review): this averaging only makes sense once weather1/weather2
        are numeric — the commented-out mapping via Constant.weather_json
        below appears to be that conversion step; confirm it has already
        been applied to the CSV.
        """
        data = pd.read_csv(Constant.RAW_DATA_PATH)
        # data['weather1'] = data['weather1'].apply(lambda x: Constant.weather_json[x])
        # data['weather2'] = data['weather2'].apply(lambda x: Constant.weather_json[x])
        data['weather_mean'] = (data['weather1'] + data['weather2']) / 2
        data.to_csv(Constant.RAW_DATA_PATH, index=False, encoding='utf_8_sig')

    @staticmethod
    def add_temperature_mean():
        """Add the mean of the daily low/high temperatures as a feature."""
        data = pd.read_csv(Constant.RAW_DATA_PATH)
        data['temperature_mean'] = (data['temperature_low'] + data['temperature_high']) / 2
        data.to_csv(Constant.RAW_DATA_PATH, index=False, encoding='utf_8_sig')

    @staticmethod
    def change_encoding():
        """Re-encode the weather CSV from GBK to UTF-8."""
        import codecs
        # Context managers close both handles even on error; the original
        # left both file descriptors open.
        with codecs.open(Constant.WEATHER_DATA_PATH, 'r', 'GBK') as src:
            content = src.read()
        with codecs.open(Constant.DATA_PATH + 'weather_utf8.csv', 'w', 'UTF-8') as dst:
            dst.write(content)


class PlotUtils:
    """Plotting helpers for visualising model output."""

    @staticmethod
    def plot(truth, prediction, epoch, show=False):
        """Plot the ground-truth and predicted series on one figure.

        When ``show`` is True the figure is displayed interactively;
        otherwise it is saved to ``Constant.GRAPH_PATH`` as
        ``epoch_<epoch>.png``.  The figure is closed in both cases.
        """
        for series, colour, tag in ((truth, 'blue', 'truth'),
                                    (prediction, 'g', 'prediction')):
            plt.plot(series, color=colour, label=tag)
        plt.grid(True, which='both')
        plt.legend()
        if show:
            plt.show()
        else:
            plt.savefig(os.path.join(Constant.GRAPH_PATH, f'epoch_{epoch}.png'))
        plt.close()


def regenerate_dataset():
    """Regenerate the windowed train/test dataset from the raw CSV."""
    DatasetUtils.generate_dataset()


def update_raw_data():
    """One-off operations that modify the raw data CSV.

    Steps are toggled by (un)commenting: each one rewrites a data file in
    place, so only the step(s) intended for this run should be active.
    """
    # DatasetUtils.weather_change_format()
    # DatasetUtils.add_covariance()
    # DatasetUtils.change_encoding()
    # DatasetUtils.add_date()
    # DatasetUtils.create_lag_data()
    DatasetUtils.add_weather()
    # DatasetUtils.add_temperature_mean()


if __name__ == "__main__":
    """for test"""
    # Ad-hoc entry point: regenerate the windowed dataset; uncomment the
    # line below instead to run the raw-data modification steps.
    regenerate_dataset()
    # update_raw_data()
