import copy
import shelve
import numpy as np
import torch
from tqdm import tqdm
from torch.utils.data import Dataset, DataLoader
from easySample import easySample
from dataPre import dataPre
import sys
# NOTE(review): this path is appended *after* the project-local imports above
# (easySample, dataPre). If those modules live under basic_script, the append
# happens too late to help them resolve — confirm the intended import order.
sys.path.append(r"/home/cyw/projects/function_sim_project/basic_script")


# Toggle for reusing sample data previously persisted via shelve instead of
# rebuilding it from scratch. Flip to True to load from the cache.
# load_from_shelve=True
load_from_shelve = False

# Model identifier passed to easySample.get_sample when loading per-function
# samples — presumably selects the sample format; confirm against easySample.
modelName = "siamese_graphsage2"


class siamese_graphsage_dataset(Dataset):
    """Pair dataset for a siamese GraphSAGE function-similarity model.

    Each item is ``[x, y, value]`` where ``x`` and ``y`` are
    ``[edge_index, att]`` — a ``(2, E)`` sparse-COO edge-index ndarray and a
    per-node attribute matrix — for the two functions of a pair, and
    ``value`` is a 0/1 similarity label tensor.
    """

    def __init__(self, pairInf, datasetName) -> None:
        """
        Args:
            pairInf: sequence of ``(sample_x_id, sample_y_id, score)`` tuples.
            datasetName: split name, e.g. ``"train"`` / ``"test"`` / ``"valid"``.
        """
        self.data = pairInf
        self.datasetName = datasetName
        self.samples = {}            # reserved cache for loaded samples
        self.eSample = easySample()  # helper that materializes one sample by id

    def samples_trans_to_batch(self, lth, att_dimension, batch, ind):
        """Pad one side of a pair batch to uniform node/edge counts.

        Args:
            lth: number of items in ``batch``.
            att_dimension: node attribute (feature) dimension.
            batch: list of ``[x, y, value]`` items as built by ``__getitem__``.
            ind: which side to collate — 0 for x, 1 for y.

        Returns:
            ``[edge_index, att]`` where ``edge_index`` is a LongTensor of
            shape ``(lth, 2, max_edges)`` and ``att`` a FloatTensor of shape
            ``(lth, max_nodes, att_dimension)``, both zero-padded per item.
        """
        if lth == 0:
            # Empty batch: return consistently-shaped empty tensors.
            return [torch.zeros((0, 2, 0), dtype=torch.long),
                    torch.zeros((0, 0, att_dimension), dtype=torch.float32)]

        # att row count == node count; edge_index has shape (2, E), so the
        # width of its first row is the edge count.
        max_node_size = max(len(batch[i][ind][1]) for i in range(lth))
        edge_size = max(len(batch[i][ind][0][0]) for i in range(lth))

        batch_adj, batch_att = [], []
        for i in range(lth):
            temp_adj = np.zeros((2, edge_size))
            temp_att = np.zeros((max_node_size, att_dimension))

            sample_size = len(batch[i][ind][1])  # node count of this sample

            # Right-pad edge_index with zeros (reads as repeated (0, 0) edges)
            # and bottom-pad the attribute matrix with zero rows.
            temp_adj[:, :len(batch[i][ind][0][0])] = batch[i][ind][0]
            temp_att[:sample_size, :] = batch[i][ind][1]

            # Arrays are freshly allocated each iteration, so no copy needed.
            batch_adj.append(temp_adj)
            batch_att.append(temp_att)

        return [torch.tensor(np.array(batch_adj), dtype=torch.long),
                torch.tensor(np.array(batch_att), dtype=torch.float32)]

    def adjust_samples_to_same_dimension(self, batch):
        """Collate a list of pair items into padded batch tensors.

        Args:
            batch: list of ``[x, y, value]`` items from ``__getitem__``.

        Returns:
            ``(labels, x_tensors, y_tensors)`` — labels is a FloatTensor of
            shape ``(len(batch),)``; each side is the pair returned by
            :meth:`samples_trans_to_batch`.

        Raises:
            ValueError: if the node attribute dimension is not 128 — the
                model expects 128-dim attributes, and continuing would feed
                wrongly shaped data downstream.
        """
        lth = len(batch)
        att_dimension = len(batch[0][0][1][0])
        if att_dimension != 128:
            raise ValueError("error  siamese_graphsage 数据批处理维度错误")

        res_x = self.samples_trans_to_batch(lth, att_dimension, batch, 0)
        res_y = self.samples_trans_to_batch(lth, att_dimension, batch, 1)
        batch_value = [batch[i][2][0] for i in range(lth)]
        return (torch.tensor(batch_value, dtype=torch.float32), res_x, res_y)

    def __getitem__(self, idx):
        """Load one pair on demand and return ``[x, y, value]``.

        The dense adjacency matrix of each sample is converted to sparse COO
        indices, i.e. a ``(2, E)`` edge_index ndarray, alongside its node
        attribute matrix. The pair's similarity score is binarized at 0.5.
        """
        sample_x = self.eSample.get_sample(self.data[idx][0], modelName)
        adj_sparse_x = torch.FloatTensor(
            sample_x["adj"]).to_sparse().indices().numpy()
        x = [adj_sparse_x, sample_x["att"]]

        sample_y = self.eSample.get_sample(self.data[idx][1], modelName)
        adj_sparse_y = torch.FloatTensor(
            sample_y["adj"]).to_sparse().indices().numpy()
        y = [adj_sparse_y, sample_y["att"]]

        label = 1 if self.data[idx][2] > 0.5 else 0
        value = torch.tensor([label])
        return [x, y, value]

    def __len__(self):
        return len(self.data)


if __name__ == "__main__":
    # After re-splitting the dataset, delete the previously saved data and
    # rerun this script to regenerate the intermediate variables.
    datapre = dataPre()
    pair_infs = datapre.get_pair_infs()
    for split in ("test", "train", "valid"):
        siamese_graphsage_dataset(pair_infs[split], split)

    # Simple smoke test — confirmed to run and load into a DataLoader:
    # sample="387e8b94fb412139a600bd1c20c3b8b6"
    # testPair=[(sample,sample,1),
    #           ("1cf593553f5b14ec993125f146d35d72",sample,1),
    #           ("1cf593553f5b14ec993125f146d35d72","1cf593553f5b14ec993125f146d35d72",1)]
    # siamese_graphsage_dataset(testPair,"else")
