import logging
import os
import pickle
import random
import time

import numpy as np
import torch
from torch_geometric.data import Data
from torch_geometric.loader import DataLoader

from util import *


def load_sn(args):
    """Load the SN trace dataset and build train/val/test graph DataLoaders.

    Reads ``sn_trace_st.pkl`` from ``args.data_dir/args.dataset``, normalizes
    each endpoint's traces, wraps every trace as a torch_geometric ``Data``
    graph (x: (N_i, 1) node features, y: 0 = normal split, 1 = abnormal
    split), shuffles, and splits 60/20/20 into train/val/test.

    Args:
        args: namespace providing ``data_dir``, ``dataset`` and ``batch_size``.

    Returns:
        Tuple ``(train_loader, val_loader, test_loader)`` of PyG DataLoaders.
        The test loader uses ``batch_size=1``.

    Raises:
        ValueError: if the dataset directory does not exist.
    """
    t = time.time()

    dataset_dir = os.path.join(args.data_dir, args.dataset)
    if not os.path.exists(dataset_dir):
        raise ValueError(f"{dataset_dir} not exist.")

    trace_data = load_pkl(os.path.join(dataset_dir, 'sn_trace_st.pkl'))

    endpoint_list = trace_data['endpoints']
    endpoint_edge_map = trace_data['endpoint_edge_map']

    # Normalize per-endpoint traces, keeping only the first feature channel.
    # Shape per endpoint: (T_i, N_i, F) -> (T_i, N_i).
    for endpoint in endpoint_list:
        for split in ('normal', 'abnormal'):
            raw = trace_data[split][endpoint][:, :, 0]  # (T_i, N_i)
            trace_data[split][endpoint] = normalize(torch.FloatTensor(raw))

    data_list = []
    num_abnormal = 0
    for endpoint in endpoint_list:
        normal_trace_data = trace_data['normal'][endpoint]      # (T_i, N_i)
        abnormal_trace_data = trace_data['abnormal'][endpoint]
        label_data = trace_data['label'][endpoint]

        # The graph topology is fixed per endpoint — build edge_index once
        # instead of once per trace (it was rebuilt inside both loops before).
        edge_index = torch.LongTensor(endpoint_edge_map[endpoint]).t().contiguous()

        for trace in normal_trace_data:
            data_list.append(Data(x=trace.unsqueeze(1),  # (N_i, 1)
                                  edge_index=edge_index,
                                  y=torch.LongTensor([0])))

        for i, trace in enumerate(abnormal_trace_data):
            # NOTE(review): every trace in the abnormal split is tagged y=1,
            # but the logged abnormal count below sums the real per-trace
            # labels — some y=1 samples may have label 0. Confirm intended.
            data_list.append(Data(x=trace.unsqueeze(1),  # (N_i, 1)
                                  edge_index=edge_index,
                                  y=torch.LongTensor([1])))
            num_abnormal += label_data[i]

    perc_abnormal = num_abnormal / len(data_list) * 100
    logging.info(f"total trace in {args.dataset}: {len(data_list)}, abnormal: {perc_abnormal:.4f}%")

    # shuffle to make abnormal samples balanced across train, val, test
    random.shuffle(data_list)

    train_dataset = data_list[:int(0.6 * len(data_list))]
    val_dataset = data_list[int(0.6 * len(data_list)):int(0.8 * len(data_list))]
    test_dataset = data_list[int(0.8 * len(data_list)):]

    # Report the abnormal (y != 0) ratio of each split.
    def _num_abnormal(dataset):
        """Count samples whose graph label y is non-zero."""
        return sum(1 for d in dataset if d.y.item() != 0)

    p_train = _num_abnormal(train_dataset) / len(train_dataset) * 100
    p_val = _num_abnormal(val_dataset) / len(val_dataset) * 100
    p_test = _num_abnormal(test_dataset) / len(test_dataset) * 100
    logging.info(f"Abnormal ratio | train: {p_train:.2f}% | val: {p_val:.2f}% | test: {p_test:.2f}%")

    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True)
    val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False)
    test_loader = DataLoader(test_dataset, batch_size=1, shuffle=False)

    logging.info("Loading completed, total time usage {:.4f}s".format(time.time() - t))

    return train_loader, val_loader, test_loader
