import json
import os
from typing import Dict
import torch
from torch.utils.data.dataset import Dataset
import gensim
import pickle as pk
from loguru import logger
import tqdm
import _thread
import numpy as np

# Per-attribute list of valid label values; a label's class index is its
# position in this list. pool_size labels range over 1..10.
attr_label_range = {"pool_size": list(range(1, 11))}


def ins2torch(x, embedding_dim, model):
    """Embed a list of token blocks into per-block tensors.

    Every non-empty block in ``x`` becomes a ``(1, len(block), embedding_dim)``
    float tensor whose rows are word2vec vectors from ``model.wv``.  Numeric
    literals (pure digits or anything containing "0x") are collapsed to the
    shared "num" token first; tokens missing from the vocabulary keep a
    zero row.  Empty blocks are dropped.
    """
    block_tensors = []
    for block in x:
        if not block:
            continue
        embedded = torch.zeros(1, len(block), embedding_dim)
        for idx, token in enumerate(block):
            if token.isdigit() or "0x" in token:
                token = "num"
            if token in model.wv:
                embedded[:, idx, :] = torch.from_numpy(np.copy(model.wv[token]))
        block_tensors.append(embedded)
    return block_tensors


def get_onehot_label(func_file_name, attr_name, label_data):
    """Return the class index of a function's attribute label as a LongTensor.

    Despite the name, this returns the label's *index* within
    ``attr_label_range[attr_name]`` (shape ``(1,)``), not a one-hot vector.
    """
    raw_value = label_data[func_file_name]
    class_idx = attr_label_range[attr_name].index(raw_value)
    return torch.LongTensor([class_idx])


def load_label(label_path, attr_name, attr_index) -> Dict:
    if not os.path.exists(label_path):
        logger.error("Label File is not found!")
    label_dict = {}
    with open(label_path, "r") as f:
        for line in f.readlines():
            file_name, json_data_raw = line.split("\\")
            json_data_raw = json_data_raw.replace("True", "1")
            json_data_raw = json_data_raw.replace("False", "0")
            json_data_raw = json_data_raw.replace("(", "[")
            json_data_raw = json_data_raw.replace(")", "]")
            json_data = json.loads(json_data_raw.replace("'", "\""))
            if attr_name in json_data:
                if isinstance(json_data[attr_name], list):
                    label_dict[file_name] = json_data[attr_name][attr_index]
                else:
                    label_dict[file_name] = json_data[attr_name]
    return label_dict


class TVMAttrDataSetARM(Dataset):
    """ARM binary-function dataset for predicting a TVM operator attribute.

    Each sample is a list of instruction-token blocks (loaded from a pickle
    keyed by ``"<op_type>-..."`` names) paired with the class index of one
    operator attribute (e.g. ``pool_size``).  Embedding is done lazily: a
    small queue of already-embedded samples is kept, each ``__getitem__``
    pops from it, and a background thread refills the queue.
    """

    def __init__(self,
                 path,
                 label_path,
                 op_type,
                 attr_name,
                 attr_index=0,
                 embedding_dim=200,
                 train_ratio=0.7,
                 mode='train',
                 w2v_model="w2v-new.model"):
        """Load the pickled token blocks, the labels, and the word2vec model.

        Args:
            path: pickle file mapping ``"<op>-..."`` keys to per-function
                dicts that contain a ``"blocks"`` entry.
            label_path: label file consumed by ``load_label``.
            op_type: only keys whose prefix (before '-') equals this are kept.
            attr_name: attribute whose value becomes the class label.
            attr_index: element index when the attribute value is a list.
            embedding_dim: word2vec vector size used when embedding tokens.
            train_ratio: fraction of the data assigned to the 'train' split.
            mode: 'train' or 'test' — selects which side of the split to load.
            w2v_model: path of the gensim Word2Vec model to load.
        """
        super().__init__()
        self.data = []
        self.shape_data = []
        self.label = []
        self.value_data = []
        self.embedding_dim = embedding_dim
        # NOTE: the original rebound the local ``embedding_dim`` to a
        # hard-coded 200 right here; the local was never read again, so the
        # dead store has been removed and the parameter is honoured.
        total_length = 0

        # Load the word2vec model
        model = gensim.models.Word2Vec.load(w2v_model)
        self.model = model

        # Load data.  A context manager replaces the manual open/close pair
        # (the original additionally called f.close() a second time, after
        # the file was already closed).
        with open(path, "rb") as f:
            data_dict = pk.load(f)

        # Load label data
        label_data = load_label(label_path, attr_name, attr_index)

        # Count keys matching the requested operator type.
        # NOTE(review): ``total_length`` counts *keys*, while the loading
        # loop below advances ``tmp_count`` once per function inside each
        # key — the split boundary assumes one function per key; confirm
        # against the pickle's layout.
        for k in data_dict:
            func_type = k.split('-')[0]
            if func_type == "auto_schedule" or func_type == "LSTM" or func_type != op_type:
                continue
            total_length += 1

        logger.info("Total training data: {}".format(total_length))
        tmp_count = 0

        logger.info("Start loading data ... [mode: {}]".format(mode))
        total_count = int(total_length * train_ratio)
        pbar_count = total_count
        if mode == 'test':
            pbar_count = total_length - total_count

        pbar = tqdm.tqdm(total=pbar_count)
        for k in data_dict:
            func_type = k.split('-')[0]
            if func_type == "auto_schedule" or func_type == 'LSTM' or func_type != op_type:
                continue

            for func in data_dict[k]:
                # First train_ratio fraction goes to 'train', the rest to
                # 'test'; the test branch also advances the counter so the
                # boundary moves past the skipped training items.
                if mode == 'train' and tmp_count >= (total_length * train_ratio):
                    continue
                if mode == 'test' and tmp_count <= (total_length * train_ratio):
                    tmp_count += 1
                    continue

                pbar.update(1)
                tmp_count += 1
                self.data.append(data_dict[k][func]["blocks"])
                # Labels are keyed by the dict key ``k``, so every function
                # under the same key shares one label.
                target = get_onehot_label(k, attr_name, label_data)
                self.label.append(target)
                # (The original re-scanned every token of every block here
                # without any effect — dead code, removed.)

        pbar.close()
        logger.info("Finished!")

        # Prefetch: embed the first few samples up front so the first
        # __getitem__ calls do not block on embedding work.
        self.order = torch.randperm(len(self.label))
        prefetch_num = 10
        self.tmp_data = []
        # Resume fetching after the prefetched samples (the original
        # hard-coded 10 here instead of reusing prefetch_num).
        self.fetch_idx = prefetch_num
        for i in self.order[:prefetch_num]:
            x, label_1, label_2, label_3 = self.get_data(i)
            tmp = ins2torch(x, self.embedding_dim, self.model)
            self.tmp_data.append([[tmp], label_1, label_2, label_3])

    def update_tmp_data(self):
        """Embed the next sample (in shuffled order) and append it to the queue.

        Reshuffles once every sample has been fetched.  Samples that embed to
        nothing are skipped by retrying on a fresh thread.
        """
        if self.fetch_idx == len(self.label):
            self.order = torch.randperm(len(self.label))
            self.fetch_idx = 0
        i = self.order[self.fetch_idx]
        self.fetch_idx += 1
        x, label_1, label_2, label_3 = self.get_data(i)
        tmp = ins2torch(x, self.embedding_dim, self.model)
        if len(tmp) > 0:
            self.tmp_data.append([[tmp], label_1, label_2, label_3])
        else:
            _thread.start_new_thread(self.update_tmp_data, ())

    def __getitem__(self, index):
        # ``index`` is intentionally ignored: samples come from the prefetch
        # queue in shuffled order, and each access kicks off a background
        # refill of the queue.
        item = self.tmp_data.pop(0)
        _thread.start_new_thread(self.update_tmp_data, ())
        return item

    def get_data(self, index):
        """Return the raw blocks plus the label repeated three times
        (callers unpack three label slots)."""
        return self.data[index], self.label[index], self.label[index], self.label[index]

    def __len__(self):
        return len(self.label)


if __name__ == "__main__":
    # Smoke test: build the MaxPooling2D pool_size dataset and log a single
    # sample before exiting.
    dataset = TVMAttrDataSetARM(
        "arm_ida_data.pkl",
        label_path="tvm_data/arm_data/aarch64_dataset/labels.txt",
        op_type="MaxPooling2D",
        attr_name="pool_size",
        w2v_model="word2vec-train-ida-arm.model",
    )
    for sample in dataset:
        logger.info(len(sample))
        logger.info("Label: {}".format(sample[1]))
        exit(0)
