from torch.utils.data import Dataset, DataLoader, Subset
import pandas as pd
import torch
import sys


def transform_PathStage(df, label_name):
    """Drop rows whose stage label is the blank placeholder ' ' and cast the label to int.

    Args:
        df (pd.DataFrame): raw table read from the CSV file.
        label_name (str): name of the label column to clean.

    Returns:
        pd.DataFrame: a new frame containing only rows with a valid label.
    """
    keep = df[label_name] != ' '
    # reset_index() is used to avoid SettingWithCopyWarning on the slice.
    # See: https://stackoverflow.com/questions/43165241/settingwithcopy-when-creating-new-column-and-when-dropping-nan-rows
    # NOTE(review): reset_index() without drop=True inserts the old index as a
    # new first column, shifting positional column offsets downstream — confirm
    # that callers' features_start/features_end account for this.
    cleaned = df.loc[keep].reset_index()
    cleaned[label_name] = cleaned[label_name].map(int)
    return cleaned


# 对每个标签进行相应的变换
transform = {
    'PathTstage': transform_PathStage,
    'PathNstage': transform_PathStage,
}


class MyDataset(Dataset):
    """CSV-backed dataset: a contiguous column range as features, one column as label.

    Features are NaN-filled with 0 and z-score normalized per column; labels are
    returned as int64 class indices.
    """

    def __init__(self, data_path, features_start, features_end, label_name):
        """Load the CSV, clean the label column, and normalize the features.

        Args:
            data_path (str): path of the CSV file to read.
            features_start (int): positional index of the first feature column.
            features_end (int): one-past-last positional feature column index.
            label_name (str): column header of the label.
        """
        df = pd.read_csv(data_path)
        # The label column may contain invalid values; if a per-label transform
        # is registered, clean the whole table first.
        df = transform[label_name](df, label_name) if transform.get(label_name) else df
        # Extract the feature columns; missing values are treated as 0.
        features_column = df.iloc[:, features_start:features_end].fillna(0)
        # Per-column z-score normalization. Guard the denominator: a
        # zero-variance column (std == 0) or a single-row table (std is NaN
        # with ddof=1) would otherwise turn the whole column into NaN.
        std = features_column.std().replace(0, 1).fillna(1)
        self.features_column_norm = (features_column - features_column.mean()) / std
        # Keep the (cleaned) label column for __getitem__/get_targets.
        self.label_column_norm = df[label_name]

    def __len__(self):
        """Return the number of samples (rows) in the dataset."""
        return self.features_column_norm.shape[0]

    def __getitem__(self, idx):
        """Return (features, label) for row idx as (float32 tensor, int64 tensor)."""
        features = torch.tensor(
            self.features_column_norm.iloc[idx].to_numpy(), dtype=torch.float32)
        # Labels are class indices; build the int64 tensor directly instead of
        # round-tripping through float32, which loses precision for large ids.
        label = torch.tensor(int(self.label_column_norm.iloc[idx]), dtype=torch.long)
        return features, label

    def get_targets(self):
        """Return all labels as a numpy array (e.g. for stratified splitting)."""
        return self.label_column_norm.to_numpy()


if __name__ == '__main__':
    # Smoke test: stream the whole dataset once and print every batch.
    this_label_name = 'PathTstage'
    dataset = MyDataset(
        data_path='data/intersected.csv',
        features_start=4,
        features_end=1266,
        label_name=this_label_name,
    )
    train_dataloader = DataLoader(dataset, batch_size=32, shuffle=False)
    for _batch, (features, label) in enumerate(train_dataloader):
        print(features, label)
        # break
