import logging
import os
import pickle
import random
import time

import numpy as np
import torch
from torch_geometric.data import Data
from torch_geometric.loader import DataLoader

from util import *
from utils.graph_util import calculate_adjacency_matrix


def load_sn(dataset_dir):
    """Load the SN trace dataset from ``dataset_dir``.

    Reads ``sn_trace_st.pkl`` and, for every endpoint listed in it,
    extracts the normal (train) and abnormal (test) traces, the anomaly
    labels and root-cause annotations, and builds the per-endpoint
    adjacency matrix from the endpoint's edge list.

    Args:
        dataset_dir: Directory expected to contain ``sn_trace_st.pkl``.

    Returns:
        A 6-tuple ``(endpoint_list, train_data_list, test_data_list,
        adj_list, label_data_list, rc_data_list)``. Train/test entries
        are normalized ``FloatTensor``s of shape (T_i, N_i); each
        adjacency is a ``LongTensor`` of shape (N_i, N_i); labels and
        root-cause entries are passed through unchanged from the pickle.

    Raises:
        ValueError: If ``dataset_dir`` does not exist.
    """
    if not os.path.exists(dataset_dir):
        raise ValueError(f"{dataset_dir} does not exist.")

    # NOTE(review): load_pkl is a project helper (util); assumed to be a
    # plain pickle.load wrapper — confirm. Pickle is only safe on trusted data.
    trace_data = load_pkl(os.path.join(dataset_dir, 'sn_trace_st.pkl'))

    endpoint_list = trace_data['endpoints']
    endpoint_edge_map = trace_data['endpoint_edge_map']

    train_data_list = []
    test_data_list = []
    label_data_list = []
    rc_data_list = []
    adj_list = []

    for endpoint in endpoint_list:
        # Keep only the first feature channel: (T_i, N_i, F) -> (T_i, N_i).
        train_data = trace_data['normal'][endpoint][:, :, 0]
        test_data = trace_data['abnormal'][endpoint][:, :, 0]

        label_data = trace_data['label'][endpoint]
        rc_data = trace_data['rc'][endpoint]

        # Per-endpoint service graph: (N_i, N_i) adjacency built from edges.
        edge_list = endpoint_edge_map[endpoint]
        adj = calculate_adjacency_matrix(edge_list)

        # normalize() comes from util; presumably a per-feature scaling —
        # TODO confirm its exact semantics against util.
        train_data_list.append(normalize(torch.FloatTensor(train_data)))
        test_data_list.append(normalize(torch.FloatTensor(test_data)))
        adj_list.append(torch.LongTensor(adj))
        label_data_list.append(label_data)
        rc_data_list.append(rc_data)

    return endpoint_list, train_data_list, test_data_list, adj_list, label_data_list, rc_data_list
