import torch
from torch_geometric.data import InMemoryDataset
from torch_geometric.data import Data
from torch.utils.data import Dataset        # 构造数据集 支持索引，总长度
from torch.utils.data import DataLoader
import numpy as np
import os
from os import path

class MyGraphDataset(InMemoryDataset):
    """In-memory toy graph dataset.

    Synthesizes 15 small 3-node graphs with alternating placeholder labels
    in :meth:`process` and caches the collated result in
    ``self.processed_paths[0]``.
    """

    def __init__(self, root='my_dataset', transform=None, pre_transform=None):
        # BUG FIX: the original called ``super(MyDataset, self).__init__``,
        # but MyDataset is not a base of this class, so instantiation raised
        # ``TypeError: super(type, obj): obj must be an instance or subtype
        # of type``. Use the zero-argument form instead.
        super().__init__(root, transform, pre_transform)
        self.data, self.slices = torch.load(self.processed_paths[0])

    @property
    def raw_file_names(self):
        # Raw inputs expected under ``self.raw_dir``. NOTE(review): these
        # files are not actually read by process() below — presumably kept
        # for a future file-based implementation; confirm before relying on them.
        return ['EDGE_FEATURES.txt', 'EDGE_INDEX.txt', 'GRAPH_INDICATOR.txt', 'NODE_FEATURES.txt']

    @property
    def processed_file_names(self):
        return ['data.pt']

    def download(self):
        # Nothing to download: the data is generated synthetically in process().
        pass

    def process(self):
        """Generate 15 placeholder graphs and save the collated dataset."""
        data_list = []
        for i in range(15):
            # Fixed 3-node path graph 0-1-2 with both edge directions listed.
            edge_index = torch.tensor([[0, 1, 1, 2],
                                       [1, 0, 2, 1]], dtype=torch.long)
            x = torch.tensor([[i], [i + 1], [i + 2]], dtype=torch.float)
            edge_attr = torch.tensor([[11, 22], [33, 44], [55, 66], [77, 88]], dtype=torch.float)
            # Placeholder labels: class 1 for even-indexed graphs, 2 for odd.
            # (The original also built a dead y = [-1] tensor that was always
            # overwritten here; removed.)
            if i % 2 == 0:
                y = torch.tensor([1, 1, 1], dtype=torch.long)
            else:
                y = torch.tensor([2, 2, 2], dtype=torch.long)
            data_list.append(Data(x=x, y=y, edge_attr=edge_attr, edge_index=edge_index))

        if self.pre_filter is not None:
            data_list = [data for data in data_list if self.pre_filter(data)]

        if self.pre_transform is not None:
            data_list = [self.pre_transform(data) for data in data_list]

        data, slices = self.collate(data_list)
        torch.save((data, slices), self.processed_paths[0])

class MyDataset(Dataset):
    """Map-style dataset loading one network scenario from text files.

    All samples are loaded into memory at construction time (approach 1:
    structured data small enough to fit in RAM; the alternative — keeping
    only per-sample file paths and loading lazily — suits unstructured data).
    Each sample is the tuple ``(node_data, link_data, flow_data, flow_label)``.

    Args:
        type: Scenario name; one of 'single_flow_single_link',
            'multi_flow_single_link', 'multi_flow_multi_link',
            'multi_flow_multi_link_mixed'. (Shadows the builtin, but the
            name is kept so existing keyword callers still work.)
        network: Sub-directory of the scenario holding the four data files.

    Raises:
        ValueError: If ``type`` is not one of the known scenario names.
    """

    def __init__(self, type='single_flow_single_link', network='network_0'):
        base_path = path.join('dataset', type, network)
        node_data_path = path.join(base_path, 'node_data.txt')
        link_data_path = path.join(base_path, 'link_data.txt')
        flow_data_path = path.join(base_path, 'flow_data.txt')
        flow_label_path = path.join(base_path, 'flow_label.txt')

        # Per-scenario reshape targets. NOTE(review): the shapes (and the
        # hard-coded 103 below) presumably mirror how the generator wrote the
        # files — confirm against the data-generation script.
        if type == 'single_flow_single_link':
            node_data = np.loadtxt(node_data_path, delimiter=',').reshape((-1, 4, 2))
            link_data = np.loadtxt(link_data_path, delimiter=',').reshape((103, -1, 3))
            flow_data = np.loadtxt(flow_data_path, delimiter=',').reshape((-1, 1, 5))
            flow_label = np.loadtxt(flow_label_path, delimiter=',').reshape((-1, 1, 1))
        elif type == 'multi_flow_single_link':
            node_data = np.loadtxt(node_data_path, delimiter=',').reshape((-1, 5, 2))
            link_data = np.loadtxt(link_data_path, delimiter=',').reshape((103, -1, 3))
            flow_data = np.loadtxt(flow_data_path, delimiter=',').reshape((-1, 2, 5))
            flow_label = np.loadtxt(flow_label_path, delimiter=',').reshape((-1, 2, 1))
        elif type == 'multi_flow_multi_link':
            node_data = np.loadtxt(node_data_path, delimiter=',').reshape((-1, 9, 2))
            link_data = np.loadtxt(link_data_path, delimiter=',').reshape((103, -1, 3))
            flow_data = np.loadtxt(flow_data_path, delimiter=',').reshape((-1, 4, 5))
            flow_label = np.loadtxt(flow_label_path, delimiter=',').reshape((-1, 4, 1))
        elif type == 'multi_flow_multi_link_mixed':
            node_data = np.loadtxt(node_data_path, delimiter=',').reshape((-1, 14, 2))
            # Stored as (sample, feature, link); transpose to (sample, link, feature)
            # to match the layout of the other scenarios.
            link_data = np.loadtxt(link_data_path, delimiter=',').reshape((-1, 3, 20))
            link_data = link_data.transpose((0, 2, 1))
            flow_data = np.loadtxt(flow_data_path, delimiter=',').reshape((-1, 13, 5))
            flow_label = np.loadtxt(flow_label_path, delimiter=',').reshape((-1, 13, 1))
        else:
            # BUG FIX: an unknown ``type`` previously fell through all the
            # branches and crashed below with NameError; fail fast instead.
            raise ValueError(f"unknown dataset type: {type!r}")

        self.node_data = torch.tensor(data=node_data, dtype=torch.float)
        self.link_data = torch.tensor(data=link_data, dtype=torch.float)
        self.flow_data = torch.tensor(data=flow_data, dtype=torch.float)
        self.flow_label = torch.tensor(data=flow_label, dtype=torch.float)
        self.len = self.node_data.shape[0]
        print("Data ready...")

    def __getitem__(self, index):
        # Support indexing: dataset[index].
        return self.node_data[index], self.link_data[index], self.flow_data[index], self.flow_label[index]

    def __len__(self):
        # Support len(dataset).
        return self.len

class MySimpleDataset:
    """Minimal loader for the 'single_flow_single_link/network_0' scenario.

    Unlike :class:`MyDataset`, this returns all tensors at once in a dict and
    provides no per-sample indexing.
    """

    def get_dataset(self):
        """Read the four data files and return them as float tensors.

        Returns:
            dict with keys 'node_data' (N, 4, 2), 'link_data' (N, 3, 4),
            'flow_data' (N, 1, 5), 'flow_label' (N, 1, 1).
        """
        # PORTABILITY FIX: the original hard-coded Windows backslash paths
        # (including the invalid escape sequence '\s', which only worked
        # because Python passes unknown escapes through literally). Build the
        # paths with os.path.join instead. (Removed several large blocks of
        # commented-out hand-written sample data.)
        base = path.join('dataset', 'single_flow_single_link', 'network_0')
        node_data = np.loadtxt(path.join(base, 'node_data.txt'), delimiter=',').reshape((-1, 4, 2))
        link_data = np.loadtxt(path.join(base, 'link_data.txt'), delimiter=',').reshape((-1, 3, 4))
        flow_data = np.loadtxt(path.join(base, 'flow_data.txt'), delimiter=',').reshape((-1, 1, 5))
        flow_label = np.loadtxt(path.join(base, 'flow_label.txt'), delimiter=',').reshape((-1, 1, 1))

        return {
            'node_data': torch.tensor(data=node_data, dtype=torch.float),
            'link_data': torch.tensor(data=link_data, dtype=torch.float),
            'flow_data': torch.tensor(data=flow_data, dtype=torch.float),
            'flow_label': torch.tensor(data=flow_label, dtype=torch.float),
        }



if __name__ == '__main__':
    # Smoke test: load the default scenario from disk.
    _ = MyDataset()