import copy
from tqdm import tqdm
import numpy as np
from torch.utils.data import Dataset
import torch
from functionSim_config import *
from torch.utils.data import DataLoader

from EasySample import EasySample


class functionSimDataset(Dataset):
    """
    Adapts (x, y, value) sample pairs into the batch format expected by the
    functionSim model.

    Input format:
        a list of (x, y, value) triples, where x and y are md5 keys naming
        two samples and value is the pair's label.
    Output format (per batch, via ``adjust_samples_to_same_dimension``):
        (labels, x_parts, y_parts), where x_parts / y_parts are each
        [adjacency, attributes, vertex_types] tensors zero-padded to the
        largest graph on that side of the batch.  A batch size of 1 is
        supported; for other layouts use the raw samples directly.
    """

    def __init__(self, datas):
        """
        datas: [(x, y, value), ...]
            x, y  -- md5 keys identifying the two samples of a pair
            value -- label for the pair
        All referenced samples are loaded into memory up front.
        """
        self.data = datas
        self.samples = {}
        self.eSample = EasySample()
        print("functionSim正在加载样本")
        self.load_samples(datas)
        print("加载完成~")

    # NOTE: method name keeps its historical spelling ("sampels") so that
    # existing callers are not broken.
    def sampels_trans_to_batch(self, lth, att_dimension, batch, ind):
        """
        Zero-pad one side of every pair in the batch to a common node count
        and stack the results into tensors.

        lth           -- number of pairs in the batch
        att_dimension -- width of each node-attribute vector
        batch         -- list of [x, y, [0], value] items from __getitem__
        ind           -- which side of the pair to process (0 = x, 1 = y)

        Returns [adjacency, attributes, vertex_types], each a tensor with a
        leading batch dimension of lth, padded to max_node_size nodes.
        """
        # Padding target: the largest node count on this side of the batch.
        # default=0 keeps the empty-batch case producing empty tensors,
        # matching the previous behavior.
        max_node_size = max((len(batch[i][ind][0]) for i in range(lth)), default=0)

        batch_adj, batch_att, batch_vtype = [], [], []
        for i in range(lth):
            temp_adj = np.zeros((max_node_size, max_node_size))
            temp_att = np.zeros((max_node_size, att_dimension))
            temp_vtype = np.zeros((max_node_size, 3))  # 3 vertex-type classes

            sample_size = len(batch[i][ind][0])  # node count of this sample

            # Copy the real data into the top-left corner; the rest stays 0.
            temp_adj[:sample_size, :sample_size] = batch[i][ind][0]
            temp_att[:sample_size, :] = batch[i][ind][1]
            temp_vtype[:sample_size, :] = batch[i][ind][2]

            # The temp arrays are freshly allocated on every iteration, so
            # they can be appended directly -- no defensive deep copy needed.
            batch_adj.append(temp_adj)
            batch_att.append(temp_att)
            batch_vtype.append(temp_vtype)

        return [
            torch.tensor(np.array(batch_adj)),
            torch.tensor(np.array(batch_att)),
            torch.tensor(np.array(batch_vtype)),
        ]

    def adjust_samples_to_same_dimension(self, batch):
        """
        collate_fn for DataLoader: pad both sides of every pair in ``batch``
        to a uniform node count and return (labels, x_tensors, y_tensors).
        """
        lth = len(batch)
        # Attribute width is taken from the first sample's attribute matrix;
        # assumed identical across the whole batch -- TODO confirm upstream.
        att_dimension = len(batch[0][0][1][0])

        res_x = self.sampels_trans_to_batch(lth, att_dimension, batch, 0)
        res_y = self.sampels_trans_to_batch(lth, att_dimension, batch, 1)
        batch_value = [batch[i][3][0] for i in range(lth)]

        return (torch.tensor(batch_value), res_x, res_y)

    def __getitem__(self, idx):
        """Return one pair as [x_parts, y_parts, [0], label_tensor]."""
        sample_x = self.samples[self.data[idx][0]]
        x = [sample_x["adj"], sample_x["att"], sample_x["vtype"]]
        sample_y = self.samples[self.data[idx][1]]
        y = [sample_y["adj"], sample_y["att"], sample_y["vtype"]]
        value = torch.tensor([self.data[idx][2]])
        return [x, y, [0], value]

    def load_samples(self, datas):
        """
        Fetch every distinct sample referenced by ``datas`` (a list of
        (x, y, value) triples) via EasySample and cache it in self.samples,
        so each md5 key is queried at most once.
        """
        for i in tqdm(range(len(datas))):
            x, y, value = datas[i]
            if x not in self.samples:
                self.samples[x] = self.eSample.queryProcessSample(x)
            if y not in self.samples:
                self.samples[y] = self.eSample.queryProcessSample(y)

    def __len__(self):
        """Number of (x, y, value) pairs in the dataset."""
        return len(self.data)


if __name__ == "__main__":
    # Smoke test: a small list of (x_md5, y_md5, label) pairs; the middle
    # entry is deliberately repeated ten times.
    pair_list = [
        ("02d63ae96c49f0bed65dfc64404e2c59", "6e56ab5d8d53a5c8666007530f692f40", 1),
        *[("02d63ae96c49f0bed65dfc64404e2c59", "7e37cc16388e6616e704c73456b6a492", 1)] * 10,
        ("02d63ae96c49f0bed65dfc64404e2c59", "29c7250265ee4419dc785931fc865b78", 1),
    ]
    print("处理functionSim数据集的预加载")
    dataset = functionSimDataset(pair_list)
    print("functionSim数据集的预加载完成")

    # The dataset's own padding routine doubles as the collate_fn.
    loader = DataLoader(
        dataset,
        batch_size=batchSize,
        shuffle=True,
        num_workers=1,
        collate_fn=dataset.adjust_samples_to_same_dimension,
    )
    for batch_idx, (query, target1, target2) in enumerate(loader):
        print("--------")