import pandas as pd
import numpy as np
from torch.utils.data import Dataset, DataLoader
import torch
import torch.nn.functional as F

# Integer codes for the categorical CSV text columns.
# List order is significant: it fixes each label's code and therefore the
# one-hot layout used downstream — do not reorder.
weather = {name: code for code, name in enumerate(
    ['大雨', '小雨', '雷阵雨', '阴', '中雨', '暴雨', '多云', '阵雨', '晴',
     '小到中雨', '中到大雨', '大到暴雨'])}
direction = {name: code for code, name in enumerate(
    ['西北风', '西南风', '北风', '东北风', '西风', '东南风', '南风', '东风'])}


def processing_data(path="./data/weather.csv"):
    """Load the daily-weather CSV and encode it as one float tensor.

    Text columns 1 and 2 (weather descriptions) are label-encoded with the
    module-level ``weather`` map; columns -6 and -5 (presumably the wind
    directions — verify against the CSV layout) with ``direction``.
    Continuous columns [3, 4, 7, 8, 9, 10] are kept as floats (MSELoss
    targets); categorical columns [1, 2, 5, 6] are one-hot encoded to
    12 + 12 + 8 + 8 = 40 dims (CrossEntropyLoss targets).

    Args:
        path: CSV file to read; defaults to the original hard-coded path.

    Returns:
        FloatTensor of shape (num_rows, 46): 6 continuous + 40 one-hot dims.
    """
    pf = pd.read_csv(path, encoding="utf-8")
    data = pf.to_numpy()
    # Label-encode the categorical text columns in place.
    data[:, 1] = np.array([weather[item] for item in data[:, 1]])
    data[:, 2] = np.array([weather[item] for item in data[:, 2]])
    data[:, -6] = np.array([direction[item] for item in data[:, -6]])
    data[:, -5] = np.array([direction[item] for item in data[:, -5]])

    # Split the MSELoss (regression) part from the CrossEntropyLoss
    # (classification) part.
    mse = torch.from_numpy(data[:, [3, 4, 7, 8, 9, 10]].astype(np.float32))
    ce = torch.from_numpy(data[:, [1, 2, 5, 6]].astype(np.float32))

    # One-hot each categorical column. Cast to float explicitly: one_hot
    # returns int64, and torch.cat on mixed int64/float32 inputs raises on
    # older PyTorch versions instead of promoting.
    w1 = F.one_hot(ce[:, 0].long(), 12).float()  # (N, 12)
    w2 = F.one_hot(ce[:, 1].long(), 12).float()  # (N, 12)
    d1 = F.one_hot(ce[:, 2].long(), 8).float()   # (N, 8)
    d2 = F.one_hot(ce[:, 3].long(), 8).float()   # (N, 8)
    ce = torch.cat([w1, w2, d1, d2], dim=-1)     # (N, 40)
    return torch.cat([mse, ce], dim=-1)          # (N, 46)


class WeatherDataset(Dataset):
    """Sliding-window dataset over the encoded daily-weather tensor.

    Sample i is the pair (data[i : i+seq_len], data[i+1 : i+1+seq_len]):
    the target is the input shifted forward by one step, i.e. next-step
    prediction for a sequence model.
    """

    def __init__(self, seq_len=10, data=None):
        """Precompute every window.

        Args:
            seq_len: window length (number of consecutive rows per sample).
            data: optional pre-encoded (N, features) tensor; when None the
                data is loaded from disk via ``processing_data()`` (original
                behavior).

        Raises:
            ValueError: if the data has fewer than seq_len + 1 rows, in
                which case no window pair can be formed.
        """
        super().__init__()
        if data is None:
            data = processing_data()
        total_len = data.shape[0]
        if total_len <= seq_len:
            raise ValueError(
                f"need more than seq_len={seq_len} rows, got {total_len}")
        # Each window is (seq_len, features); stacking yields tensors of
        # shape (total_len - seq_len, seq_len, features).
        windows = [data[i:i + seq_len] for i in range(total_len - seq_len)]
        targets = [data[i + 1:i + 1 + seq_len]
                   for i in range(total_len - seq_len)]
        self.inputs = torch.stack(windows, dim=0)
        self.outputs = torch.stack(targets, dim=0)

    def __len__(self):
        return len(self.inputs)

    def __getitem__(self, index):
        return self.inputs[index], self.outputs[index]


def generate_weather_loader(seq_len, batch_size=10):
    """Build a shuffled DataLoader over the sliding-window weather dataset.

    Args:
        seq_len: window length passed through to WeatherDataset.
        batch_size: samples per batch; incomplete final batches are dropped.

    Returns:
        A DataLoader yielding (input, target) window batches.
    """
    return DataLoader(
        WeatherDataset(seq_len),
        batch_size=batch_size,
        shuffle=True,
        drop_last=True,
    )
