import torch
from torch.utils.data import Dataset, DataLoader
from functionSim_config import *
from easySample import easySample
from tqdm import tqdm
import numpy as np
import copy
import os
import shelve

# When True, HGMSimDataset.__init__ skips the expensive raw-sample parsing
# step and reads the previously cached shelve files directly.
load_from_shelve= False

# Shared sample loader (project-local helper); used by
# HGMSimDataset.load_samples to parse one sample file into a graph dict.
eSample=easySample()

class HGMSimDataset(Dataset):
    """Dataset that builds the embedding model's inputs from graph samples.

    Each sample is a dict with keys 'adj' (N x N adjacency), 'att'
    (N x D node attributes) and 'vtype' (N x 3 node-type one-hots).
    Parsed samples are cached in per-path shelve files so later runs
    (``load_from_shelve == True``) can skip the parsing step entirely.
    """

    # Directory holding the per-input-path shelve caches.
    _SHELVE_DIR = "/home/cyw/projects/malware_detected/saveData/graphData/save_load_data"

    def __init__(self, inputPaths, is_malware_Path, save_name):
        """Load (or build and cache) samples for every input directory.

        Args:
            inputPaths: list of directories containing sample files.
            is_malware_Path: list of bools parallel to ``inputPaths`` —
                True means the directory holds malware (label 1),
                False means benign (label 0).
            save_name: prefix used to name the shelve cache files.
        """
        if not load_from_shelve:
            # First pass: parse raw samples and persist them to shelve.
            for ind in range(len(inputPaths)):
                ans = self.load_samples(inputPaths[ind], is_malware_Path[ind])
                name = save_name + "_path_{}".format(ind)
                with shelve.open("{}/{}".format(self._SHELVE_DIR, name)) as db:
                    db["inputs"] = ans["sample"]
                    db["labels"] = ans["label"]

        # Second pass: read everything back from the caches so both the
        # fresh-build and load-from-cache code paths end up identical.
        self.inputs = []
        self.labels = []
        for ind in range(len(inputPaths)):
            name = save_name + "_path_{}".format(ind)
            with shelve.open("{}/{}".format(self._SHELVE_DIR, name)) as db:
                loaded = db["inputs"]
                self.inputs.extend(loaded)
                self.labels.extend(db["labels"])
                print("input Path:{},样本数量：{}".format(inputPaths[ind], len(loaded)))

    def __len__(self):
        """Number of cached samples."""
        return len(self.inputs)

    def __getitem__(self, idx):
        """Return the ``(sample, label)`` pair at position ``idx``."""
        return self.inputs[idx], self.labels[idx]

    def load_samples(self, tarPath, is_malware):
        """Parse every ``<name>.dir`` entry under ``tarPath``.

        Args:
            tarPath: directory to scan.
            is_malware: label to assign to every sample (True -> 1).

        Returns:
            dict with keys ``"sample"`` (list of graph dicts) and
            ``"label"`` (parallel list of 1/0 ints).
        """
        res = []
        label = []
        for fileName in tqdm(os.listdir(tarPath)):
            parts = fileName.split(".")
            # BUG FIX: guard len(parts) — the original indexed parts[1]
            # unconditionally and raised IndexError on dot-less filenames.
            if len(parts) > 1 and parts[1] == "dir":
                tarFile = os.path.join(tarPath, parts[0])
                try:
                    sample = eSample.get_sample(tarFile, "functionSim")
                    # Skip samples with no graph structure, and cap the
                    # node count to keep padded batches memory-bounded.
                    n_nodes = len(sample["adj"])
                    if 0 < n_nodes < 10000:
                        res.append(sample)
                        label.append(1 if is_malware else 0)
                except Exception as e:
                    # Best-effort: a single bad sample must not abort the
                    # whole directory scan.
                    print("exception：{}".format(e))
        return {"sample": res, "label": label}

    def adjust_samples_to_same_dimension(self, batch):
        """Collate function: zero-pad all graphs to the batch's largest
        node count and stack them into batched tensors.

        Args:
            batch: list of ``(sample, label)`` pairs (see class docstring
                for the sample dict layout).

        Returns:
            ``(labels, [adj, att, vtype])`` — ``labels`` is a 1-D double
            tensor; the list holds (B, max_N, max_N), (B, max_N, D) and
            (B, max_N, 3) tensors respectively.
        """
        att_dimension = len(batch[0][0]['att'][0])
        max_node_size = max(len(sample["adj"]) for sample, _ in batch)

        batch_adj, batch_att, batch_vtype = [], [], []
        for sample, _ in batch:
            sample_size = len(sample["adj"])
            # Fresh arrays every iteration, so no defensive deepcopy is
            # needed (the original deep-copied each one redundantly).
            temp_adj = np.zeros((max_node_size, max_node_size))
            temp_att = np.zeros((max_node_size, att_dimension))
            temp_vtype = np.zeros((max_node_size, 3))
            temp_adj[:sample_size, :sample_size] = sample['adj']
            temp_att[:sample_size, :] = sample['att']
            temp_vtype[:sample_size, :] = sample['vtype']
            batch_adj.append(temp_adj)
            batch_att.append(temp_att)
            batch_vtype.append(temp_vtype)

        res = [torch.tensor(np.array(batch_adj)),
               torch.tensor(np.array(batch_att)),
               torch.tensor(np.array(batch_vtype))]
        batch_value = [label for _, label in batch]
        return (torch.tensor(batch_value).double(), res)

if __name__ == "__main__":
    inputPaths = ["/home/cyw/projects/malware_detected/saveData/graphData/test",
                  "/home/cyw/projects/malware_detected/saveData/graphData/test2"]
    is_malware = [True, False]
    # BUG FIX: HGMSimDataset.__init__ requires a third argument
    # (save_name, the shelve-cache prefix); the original call omitted it
    # and raised TypeError before any data was loaded.
    my_dataset = HGMSimDataset(inputPaths, is_malware, "functionSim_test")
    # batchSize comes from functionSim_config (star import at file top).
    dataloader = DataLoader(my_dataset, batch_size=batchSize, shuffle=True,
                            num_workers=16,
                            collate_fn=my_dataset.adjust_samples_to_same_dimension)
    for i, (label, res) in enumerate(dataloader):
        print(label)
        # The original printed res twice; once is enough.
        print(res)