import sys
import time

import torch.utils
import torch.utils.data

sys.path.append('src')

import tasks.bvd.bvd_run as graph_constructor
from data.api import iter_dataset
import pickle
import torch
from torch_geometric.data import Data,Batch
from torch_geometric.loader import DataLoader
import nn.api as nn_api
import nn.simplest_gcn as simplest_gcn
import os
import numpy as np

class TestDataset(torch.utils.data.Dataset):
    """Dataset of disassembled-function graphs vectorized into fixed-size
    node-feature tensors.

    On first construction the (graph, label) pairs in ``objects`` are
    vectorized and pickled to ``samples_route``; later constructions reload
    that cache and ignore ``objects`` entirely.
    """

    # Fixed-size node buffer: rows beyond MAX_NODES are dropped, and only the
    # first 44 of FEATURE_DIM columns are populated (mnemonic one-hot).
    MAX_NODES = 1000
    FEATURE_DIM = 128

    # Predefined one-hot index per known mnemonic; anything else maps to 'others'.
    ONE_HOT_DICT = {"xor": 0, "mov": 1, "pop": 2, "and": 3, "push": 4, "lea": 5, "call": 6, "add": 7, "sub": 8, "jmp": 9, "cmp": 10, "je": 11, "nop": 12, "leave": 13, "ret": 14, "test": 15, "hlt": 16, "div": 17, "imul": 18, "shr": 19, "shl": 20, "sar": 21, "movabs": 22, "setb": 23, "movzx": 24, "setne": 25, "jae": 26, "jne": 27, "jle": 28, "jbe": 29, "pxor": 30, "movaps": 31, "movq": 32, "js": 33, "cdqe": 34, "not": 35, "sete": 36, "jg": 37, "enter": 38, "rol": 39, "fadd": 40, "jb": 41, "rep stosq": 42, "others": 43}

    def __init__(self, objects, samples_route=r'preprocessed_data/samples/cwe121.pkl'):
        """Build (or reload) the vectorized sample list.

        Args:
            objects: iterable of ``(graph_object, label)`` pairs; each graph
                object must expose ``inst_nodes`` (see ``vectorize``). Ignored
                when the cache file already exists.
            samples_route: path of the pickle cache file.
        """
        self.labels = []
        self.samples = []
        self.samples_route = samples_route

        before_io = time.time()
        if os.path.exists(self.samples_route):
            # Fast path: reload previously vectorized samples from the cache.
            with open(self.samples_route, 'rb') as f:
                for sample in pickle.load(f):
                    self.labels.append(sample['label'])
                    self.samples.append(sample['vector'])
        else:
            sample_ls = []
            for obj, label in objects:
                vec = self.vectorize(obj)
                self.labels.append(label)
                self.samples.append(vec)
                sample_ls.append({'label': label, 'vector': vec})
            with open(self.samples_route, 'wb') as f:
                pickle.dump(sample_ls, f)

        print('vectorize total time:', time.time() - before_io)

    def vectorize(self, obj):
        """Turn one graph object into a ``Data`` holding a
        (MAX_NODES, FEATURE_DIM) node-feature matrix.

        Only columns 0-43 (mnemonic one-hot) are filled; the rest stay zero.
        Instructions are processed in address order and any beyond MAX_NODES
        are silently dropped.
        """
        x = torch.zeros((self.MAX_NODES, self.FEATURE_DIM), dtype=torch.float32)

        # Sort by address so the row index reflects instruction layout.
        obj.inst_nodes.sort(key=lambda n: n.address)
        # BUGFIX: the row index was previously never advanced inside this loop
        # (the increment sat after the loop body), so every node overwrote
        # row 0; enumerate() advances it per node.
        for idx, node in enumerate(obj.inst_nodes):
            if idx >= self.MAX_NODES:
                break  # fixed-size buffer is full
            mnemonic = node.features[0]
            # Unknown mnemonics fall into the shared 'others' bucket.
            x[idx][self.ONE_HOT_DICT.get(mnemonic, self.ONE_HOT_DICT['others'])] = 1

        # NOTE(review): no edge_index is built yet — the Data carries node
        # features only; confirm whether edges should be added here.
        return Data(x=x)

    def __getitem__(self, index):
        """Return sample ``index`` with its label attached as ``y`` and node
        features truncated to the 44 one-hot columns actually populated."""
        data = self.samples[index]
        data.y = torch.LongTensor([self.labels[index]])
        data.x = data.x[:, :44]
        return data

    def __len__(self):
        return len(self.samples)

class TestModel(torch.nn.Module):
    """Minimal baseline classifier: sum-pool the per-node features of each
    sample, then run a two-layer MLP producing 2-class logits."""

    def __init__(self, device=None):
        """Args:
            device: optional torch device; when truthy, the module is moved
                there and incoming batches are moved in ``forward``.
        """
        super().__init__()
        self.linear = torch.nn.Linear(44, 128)
        self.classifier = torch.nn.Linear(128, 2)
        self.device = device
        if device:
            self.to(device)

    def forward(self, data):
        """data: batch object whose ``x`` is a (batch*1000, 44) feature matrix.
        Returns (batch, 2) logits."""
        data = data.to(self.device)

        # assumes every sample contributes exactly 1000 node rows — TODO
        # confirm this matches TestDataset's fixed-size buffer upstream.
        x = data.x.reshape(-1, 1000, 44)

        x = torch.sum(x, dim=1)  # sum-pool over the node dimension
        x = torch.relu(self.linear(x))
        return self.classifier(x)

# Root of the preprocessed CWE-121 (stack-based buffer overflow) dataset.
dataset_dir='preprocessed_data/julia/CWE121_Stack_Based_Buffer_Overflow'



# Accumulates (graph_object, label) pairs filled by preprocess_json().
objects=[]
# Cache of parsed CFG graph objects (written by preprocess_json, read by load_json_for_test).
pickle_savefile = 'caches_data/cfg_graph.pkl'
# Cache of vectorized samples used by TestDataset.
dataset_savefile='caches_data/test_dataset.pkl'

# Prefer the first CUDA GPU, falling back to CPU.
device=torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

def preprocess_json():
    """Parse every CFG file under ``dataset_dir`` into a graph object and
    pickle the accumulated (graph, label) list to ``pickle_savefile``.

    Appends into the module-level ``objects`` list as a side effect.
    """
    all_files = iter_dataset(dataset_dir)
    # NOTE(review): len() assumes iter_dataset returns a sized container, not
    # a generator — confirm in data.api. Hoisted out of the loop either way.
    total = len(all_files)
    for i, (files, label) in enumerate(all_files, start=1):
        cfg = files[1]  # presumably files[1] is the CFG payload — verify against iter_dataset
        factory = graph_constructor.json_constructor(cfg, 'cfg')
        objects.append((factory.construct_by_inst(), label))
        print(f'{i}/{total}')

    with open(pickle_savefile, 'wb') as f:
        pickle.dump(objects, f)

def load_json_for_test():
    """Load cached graph objects, (re)build the vectorized sample cache, and
    train/evaluate the baseline model on an 80/20 random split."""
    # Force the sample cache to be rebuilt from the graph pickle each run.
    rebuild = True
    if rebuild and os.path.exists(dataset_savefile):
        os.remove(dataset_savefile)

    # Only the pickle load needs the file handle; training used to run inside
    # this with-block, keeping the file open for the whole session.
    with open(pickle_savefile, 'rb') as f:
        objects = pickle.load(f)

    dataset = TestDataset(objects, samples_route=dataset_savefile)

    # 80/20 train/test random split.
    train_size = int(len(dataset) * 0.8)
    train_dataset, test_dataset = torch.utils.data.random_split(
        dataset, [train_size, len(dataset) - train_size])

    train_loader = DataLoader(train_dataset, batch_size=128, shuffle=True)
    test_loader = DataLoader(test_dataset, batch_size=128, shuffle=False)

    model = TestModel(device=device)
    nn_api.train_model(model, train_loader, test_loader, device=device, lr=1e-3)

# Stage 1 (run once): preprocess_json() parses raw CFGs into the graph pickle.
# Stage 2: load_json_for_test() trains the baseline model from that pickle.
# Guarded so importing this module no longer triggers a training run.
if __name__ == '__main__':
    # preprocess_json()
    load_json_for_test()