import logging
import os
import pickle
import random
import time

import numpy as np
import torch
from torch_geometric.data import Data
from torch_geometric.loader import DataLoader

from util import *

# The SocialNetwork microservices; a service's index in this list is used to
# look up its row in the metric tensor of shape (#service, seq_len, F_m).
sn_service_list = ['home-timeline-service', 'post-storage-service', 'media-service', 'nginx-web-server', 'text-service', 'user-mention-service', 'social-graph-service', 'user-service', 'compose-post-service', 'url-shorten-service', 'unique-id-service', 'user-timeline-service']
# Static call-graph topology per endpoint: each value is a list of directed
# edges [src, dst] over the span-node ids defined in sn_endpoint_node_map.
sn_endpoint_edge_map = {
    'http://localhost:8080/api/user/follow POST': [[0, 2], [1, 3], [1, 4], [1, 5], [2, 1], [2, 6], [2, 7], [6, 8], [7, 9]],
    'http://localhost:8080/api/user/unfollow POST': [[0, 5], [4, 1], [4, 2], [4, 3], [5, 4], [5, 6], [5, 7], [6, 8], [7, 9]],
    'http://localhost:8080/api/user/get_followee GET': [[0, 1], [1, 2], [1, 3], [1, 4]],
    'http://localhost:8080/api/post/compose POST': [[0, 24], [1, 10], [2, 0], [2, 1], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 18], [4, 21], [5, 13], [6, 9], [7, 27], [8, 14], [9, 8], [11, 2], [13, 12], [14, 15], [14, 16], [14, 17], [18, 19], [18, 20], [19, 22], [20, 23], [27, 25], [27, 26]],
    'http://localhost:8080/api/user/get_follower GET': [[0, 1], [1, 2], [1, 3], [1, 4]],
    'http://localhost:8080/api/home-timeline/read?start=0&stop=100 GET': [[1, 0], [1, 3], [2, 1]],
    'http://localhost:8080/api/user/register POST': [[0, 1]],
    'http://localhost:8080/api/user/login POST': [[0, 1], [1, 2]],
}
# Maps each endpoint to {node id -> 'service:operation'} labels for the spans
# making up that endpoint's trace graph.  The 'service' prefix (before ':')
# selects which row of the metric tensor is attached to the node.
sn_endpoint_node_map = {
    'http://localhost:8080/api/user/follow POST': {0: 'nginx-web-server:Follow_0',
                                                   1: 'social-graph-service:follow_server_0',
                                                   2: 'social-graph-service:follow_with_username_server_0',
                                                   3: 'social-graph-service:mongo_update_client_0',
                                                   4: 'social-graph-service:social_graph_mongo_update_client_0',
                                                   5: 'social-graph-service:social_graph_redis_update_client_0',
                                                   6: 'user-service:get_user_id_server_0',
                                                   7: 'user-service:get_user_id_server_1',
                                                   8: 'user-service:user_mmc_get_user_id_client_0',
                                                   9: 'user-service:user_mmc_get_user_id_client_1'},
    'http://localhost:8080/api/user/unfollow POST': {0: 'nginx-web-server:Unfollow_0',
                                                     1: 'social-graph-service:social_graph_mongo_delete_client_0',
                                                     2: 'social-graph-service:social_graph_mongo_delete_client_1',
                                                     3: 'social-graph-service:social_graph_redis_update_client_0',
                                                     4: 'social-graph-service:unfollow_server_0',
                                                     5: 'social-graph-service:unfollow_with_username_server_0',
                                                     6: 'user-service:get_user_id_server_0',
                                                     7: 'user-service:get_user_id_server_1',
                                                     8: 'user-service:user_mmc_get_user_id_client_0',
                                                     9: 'user-service:user_mmc_get_user_id_client_1'},
    'http://localhost:8080/api/user/get_followee GET': {0: 'nginx-web-server:GetFollowee_0',
                                                        1: 'social-graph-service:get_followees_server_0',
                                                        2: 'social-graph-service:social_graph_mongo_find_client_0',
                                                        3: 'social-graph-service:social_graph_redis_get_client_0',
                                                        4: 'social-graph-service:social_graph_redis_insert_client_0'},
    'http://localhost:8080/api/post/compose POST': {0: 'compose-post-service:compose_creator_client_0',
                                                    1: 'compose-post-service:compose_media_client_0',
                                                    2: 'compose-post-service:compose_post_server_0',
                                                    3: 'compose-post-service:compose_text_client_0',
                                                    4: 'compose-post-service:compose_unique_id_client_0',
                                                    5: 'compose-post-service:store_post_client_0',
                                                    6: 'compose-post-service:write_home_timeline_client_0',
                                                    7: 'compose-post-service:write_user_timeline_client_0',
                                                    8: 'home-timeline-service:get_followers_client_0',
                                                    9: 'home-timeline-service:write_home_timeline_server_0',
                                                    10: 'media-service:compose_media_server_0',
                                                    11: 'nginx-web-server:compose_post_client_0',
                                                    12: 'post-storage-service:post_storage_mongo_insert_client_0',
                                                    13: 'post-storage-service:store_post_server_0',
                                                    14: 'social-graph-service:get_followers_server_0',
                                                    15: 'social-graph-service:social_graph_mongo_find_client_0',
                                                    16: 'social-graph-service:social_graph_redis_get_client_0',
                                                    17: 'social-graph-service:social_graph_redis_insert_client_0',
                                                    18: 'text-service:compose_text_server_0',
                                                    19: 'text-service:compose_urls_client_0',
                                                    20: 'text-service:compose_user_mentions_client_0',
                                                    21: 'unique-id-service:compose_unique_id_server_0',
                                                    22: 'url-shorten-service:compose_urls_server_0',
                                                    23: 'user-mention-service:compose_user_mentions_server_0',
                                                    24: 'user-service:compose_creator_server_0',
                                                    25: 'user-timeline-service:write_user_timeline_mongo_insert_client_0',
                                                    26: 'user-timeline-service:write_user_timeline_redis_update_client_0',
                                                    27: 'user-timeline-service:write_user_timeline_server_0'},
    'http://localhost:8080/api/user/get_follower GET': {0: 'nginx-web-server:GetFollower_0',
                                                        1: 'social-graph-service:get_followers_server_0',
                                                        2: 'social-graph-service:social_graph_mongo_find_client_0',
                                                        3: 'social-graph-service:social_graph_redis_get_client_0',
                                                        4: 'social-graph-service:social_graph_redis_insert_client_0'},
    # NOTE(review): node 2 being 'nginx-web-server:Login_0' under the
    # home-timeline read endpoint looks like a copy/paste of the login span
    # label — confirm against the collected traces.
    'http://localhost:8080/api/home-timeline/read?start=0&stop=100 GET': {
        0: 'home-timeline-service:read_home_timeline_redis_find_client_0',
        1: 'home-timeline-service:read_home_timeline_server_0', 2: 'nginx-web-server:Login_0',
        3: 'post-storage-service:post_storage_read_posts_server_0'},
    'http://localhost:8080/api/user/register POST': {0: 'nginx-web-server:RegisterUser_0',
                                                     1: 'user-service:register_user_server_0'},
    'http://localhost:8080/api/user/login POST': {0: 'nginx-web-server:Login_0', 1: 'user-service:login_server_0',
                                                  2: 'user-service:user_mmc_get_client_0'},
}



def load_sn_full(args, limits=5):
    '''
    Load the SocialNetwork dataset with both trace (span) and metric features.

    Reads the preprocessed files under ``args.data_dir/args.dataset``, builds
    one PyG ``Data`` graph per surviving trace (node features are the span
    features aligned with each service's metric window), shuffles, and splits
    60/20/20 into train/val/test loaders.

    :param args: namespace providing ``data_dir``, ``dataset``, ``window`` and
        ``batch_size``; as a side effect ``args.span_feats`` and
        ``args.metric_feats`` are set for downstream model construction.
    :param limits: if #node < limits, drop the trace
    :return: (train_loader, val_loader, test_loader)
    :raises ValueError: if the dataset directory does not exist, or no trace
        survives filtering/alignment (previously a ZeroDivisionError).
    '''
    t = time.time()

    dataset_dir = os.path.join(args.data_dir, args.dataset)
    if not os.path.exists(dataset_dir):
        raise ValueError(f"{dataset_dir} not exist.")

    time_index = load_pkl(os.path.join(dataset_dir, 'sn_time_index.pkl'))
    metric_all_data = load_npy(os.path.join(dataset_dir, 'sn_metric_all.npy'))   # (#service, seq_len, F_m)
    trace_data = load_pkl(os.path.join(dataset_dir, 'sn_trace.pkl'))      # (trace_feat, metainfo) trace_feat:(L, F_s)

    endpoint_edge_map = sn_endpoint_edge_map
    endpoint_node_map = sn_endpoint_node_map
    service_list = sn_service_list

    trace_feat_list = []
    metainfo_list = []
    for feat, mt in trace_data:
        if mt['num_span'] < limits:
            # too few spans: graph is uninformative, drop the trace
            continue
        metainfo_list.append(mt)
        # drop last column: degree
        trace_feat_list.append(torch.FloatTensor(feat[:, :-1]))

    if not trace_feat_list:
        # guard: avoids IndexError on trace_feat_list[0] below
        raise ValueError(f"no trace with at least {limits} spans in {args.dataset}")

    # normalize
    standard_scale_trace(trace_feat_list)
    metric_all_data = standard_scale_metric(metric_all_data)
    metric_all_data = torch.FloatTensor(metric_all_data)

    data_list = []
    num_abnormal = 0
    for trace_feat, mt in zip(trace_feat_list, metainfo_list):
        endpoint = mt['endpoint']

        idx_node_map = endpoint_node_map[endpoint]
        # fuse span features with each node's service metric window
        x = align_trace_metric(trace_feat, mt, time_index, metric_all_data, idx_node_map, service_list, args.window)
        if x is None:
            # alignment failed for this trace; skip it
            continue

        edge_index = torch.LongTensor(endpoint_edge_map[endpoint]).t().contiguous()
        y = torch.LongTensor([mt['is_abnormal']])
        if mt['is_abnormal'] == 1:
            num_abnormal += 1
        data_list.append(Data(x=x, edge_index=edge_index, y=y))

    if not data_list:
        # guard: avoids ZeroDivisionError in the ratio computations below
        raise ValueError(f"no usable trace after metric alignment in {args.dataset}")

    perc_abnormal = num_abnormal / len(data_list) * 100
    logging.info(f"total trace in {args.dataset}: {len(data_list)}, abnormal: {perc_abnormal:.4f}%")

    # shuffle to make abnormal samples balance in train, val, test
    random.shuffle(data_list)

    n = len(data_list)
    train_dataset = data_list[:int(0.6 * n)]
    val_dataset = data_list[int(0.6 * n):int(0.8 * n)]
    test_dataset = data_list[int(0.8 * n):]

    def _abnormal_pct(split):
        # percentage of abnormal samples in a split; 0.0 for an empty split
        # (small datasets previously raised ZeroDivisionError here)
        return sum(d.y.item() for d in split) / len(split) * 100 if split else 0.0

    p_train, p_val, p_test = (_abnormal_pct(s) for s in (train_dataset, val_dataset, test_dataset))
    logging.info(f"Abnormal ratio | train: {p_train:.2f}% | val: {p_val:.2f}% | test: {p_test:.2f}%")

    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True)
    val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False)
    test_loader = DataLoader(test_dataset, batch_size=1, shuffle=False)

    logging.info("Loading completed, total time usage {:.4f}s".format(time.time() - t))

    # expose feature dimensions for model construction
    args.span_feats = trace_feat_list[0].shape[1]
    args.metric_feats = metric_all_data.shape[2]

    return train_loader, val_loader, test_loader


def load_sn_wo_metric(args, limits=5):
    '''
    Load the SocialNetwork dataset with trace (span) features only — the
    metric modality is omitted (``args.metric_feats`` is set to 0).

    Builds one PyG ``Data`` graph per surviving trace, shuffles, and splits
    60/20/20 into train/val/test loaders.

    :param args: namespace providing ``data_dir``, ``dataset`` and
        ``batch_size``; as a side effect ``args.span_feats`` and
        ``args.metric_feats`` are set for downstream model construction.
    :param limits: if #node < limits, drop the trace
    :return: (train_loader, val_loader, test_loader)
    :raises ValueError: if the dataset directory does not exist, or no trace
        survives filtering (previously a ZeroDivisionError).
    '''
    t = time.time()

    dataset_dir = os.path.join(args.data_dir, args.dataset)
    if not os.path.exists(dataset_dir):
        raise ValueError(f"{dataset_dir} not exist.")

    trace_data = load_pkl(os.path.join(dataset_dir, 'sn_trace.pkl'))      # (trace_feat, metainfo) trace_feat:(L, F_s)

    endpoint_edge_map = sn_endpoint_edge_map

    trace_feat_list = []
    metainfo_list = []
    for feat, mt in trace_data:
        if mt['num_span'] < limits:
            # too few spans: graph is uninformative, drop the trace
            continue
        metainfo_list.append(mt)
        # drop last column: degree
        trace_feat_list.append(torch.FloatTensor(feat[:, :-1]))

    if not trace_feat_list:
        # guard: avoids IndexError/ZeroDivisionError below
        raise ValueError(f"no trace with at least {limits} spans in {args.dataset}")

    # normalize
    standard_scale_trace(trace_feat_list)

    data_list = []
    num_abnormal = 0
    for trace_feat, mt in zip(trace_feat_list, metainfo_list):
        edge_index = torch.LongTensor(endpoint_edge_map[mt['endpoint']]).t().contiguous()
        y = torch.LongTensor([mt['is_abnormal']])
        if mt['is_abnormal'] == 1:
            num_abnormal += 1
        data_list.append(Data(x=trace_feat, edge_index=edge_index, y=y))

    perc_abnormal = num_abnormal / len(data_list) * 100
    logging.info(f"total trace in {args.dataset}: {len(data_list)}, abnormal: {perc_abnormal:.4f}%")

    # shuffle to make abnormal samples balance in train, val, test
    random.shuffle(data_list)

    n = len(data_list)
    train_dataset = data_list[:int(0.6 * n)]
    val_dataset = data_list[int(0.6 * n):int(0.8 * n)]
    test_dataset = data_list[int(0.8 * n):]

    def _abnormal_pct(split):
        # percentage of abnormal samples in a split; 0.0 for an empty split
        # (small datasets previously raised ZeroDivisionError here)
        return sum(d.y.item() for d in split) / len(split) * 100 if split else 0.0

    p_train, p_val, p_test = (_abnormal_pct(s) for s in (train_dataset, val_dataset, test_dataset))
    logging.info(f"Abnormal ratio | train: {p_train:.2f}% | val: {p_val:.2f}% | test: {p_test:.2f}%")

    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True)
    val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False)
    test_loader = DataLoader(test_dataset, batch_size=1, shuffle=False)

    logging.info("Loading completed, total time usage {:.4f}s".format(time.time() - t))

    # expose feature dimensions for model construction (no metric modality)
    args.span_feats = trace_feat_list[0].shape[1]
    args.metric_feats = 0

    return train_loader, val_loader, test_loader