from typing import List, Tuple, Optional, Dict
import os
from torch import Tensor
from torch_geometric.data.data import BaseData
from torch_geometric.data import Data, Dataset
import torch
import torch_geometric.transforms as T
from create_alarms_data_002 import AlarmsData
from load_data_001 import alarms_df
from torch_geometric.data.collate import collate


class AlarmsBinaryDataset(Dataset):
    """Graph dataset built from the module-level ``alarms_df`` DataFrame.

    Rows of ``alarms_df`` are consumed in chunks of ``batch_size``; each chunk
    is turned into a heterogeneous graph by :class:`AlarmsData`, made
    undirected, and persisted as ``<processed_dir>/data_{i}.pt``.

    Args:
        root: Root directory; processed files live under ``root/processed``.
        transform: Per-access transform (applied by the base class on `get`).
        pre_transform: Transform applied once before saving.
        batch_size: Number of alarm rows per processed graph file
            (generalizes the previously hard-coded 200; default preserves
            the original behavior).
    """

    def __init__(self, root, transform=None, pre_transform=None, batch_size=200):
        # Must be set before super().__init__(): the base class may call
        # process(), which reads self.batch_size via processed_file_names.
        self.batch_size = batch_size
        super().__init__(root, transform, pre_transform)
        self.transform = transform
        # BUG FIX: the original assigned `self.pre_transform = transform`,
        # silently discarding the caller's pre_transform argument.
        self.pre_transform = pre_transform

    @property
    def processed_paths(self) -> List[str]:
        r"""The absolute filepaths that must be present in order to skip
        processing."""
        files = self.processed_file_names
        return [os.path.join(self.processed_dir, f) for f in files]

    @property
    def raw_file_names(self):
        """Raw file(s) expected under ``raw_dir``."""
        return ["alarms.csv"]

    def download(self):
        # Data is provided locally; nothing to download.
        pass

    @property
    def processed_file_names(self):
        # NOTE(review): `// batch_size + 1` produces one extra (empty) batch
        # when len(alarms_df) is an exact multiple of batch_size — kept as-is
        # to preserve the original on-disk file layout; confirm intent.
        batch_len = len(alarms_df) // self.batch_size + 1
        return ["data_{}.format.pt".replace(".format", "").format(i) for i in range(batch_len)] if False else ["data_{}.pt".format(i) for i in range(batch_len)]

    def process(self):
        """Build one heterogeneous graph per batch of alarm rows and save it."""
        for i in range(len(self.processed_file_names)):
            start_id = i * self.batch_size
            end_id = start_id + self.batch_size
            alarm_data = AlarmsData(alarms_df, start_id, end_id)
            data = T.ToUndirected()(alarm_data.data)
            # `data_expalin` [sic — external API name] emits diagnostics for
            # the given edge type; signature taken from the original calls.
            alarm_data.data_expalin(data[('alarm', 'on', 'host')].edge_index,
                                    type="hetero", edge_type="a2h")
            alarm_data.data_expalin(data[('alarm', 'to', 'btree')].edge_index,
                                    type="hetero", edge_type="a2b")
            torch.save(data, os.path.join(self.processed_dir, 'data_{}.pt'.format(i)))

    def len(self):
        """Number of processed graph files."""
        return len(self.processed_file_names)

    def get(self, idx):
        """Load and return the graph saved for batch ``idx``."""
        data = torch.load(os.path.join(self.processed_dir, f'data_{idx}.pt'))
        return data



# NOTE(review): duplicate of the top-of-file import; `InMemoryDataset` is
# unused here — kept only to avoid breaking any downstream `import *` usage.
from torch_geometric.data import Data, InMemoryDataset

if __name__ == "__main__":
    # Guarded so importing this module no longer rebuilds the dataset as a
    # side effect; run the file directly to (re)process the graph files.
    my_dataset = AlarmsBinaryDataset(root="/home/Dyf/data/Alarms/data", transform=None)
    my_dataset.process()
