import networkx as nx
import pandas as pd
import pickle
import numpy as np
import scipy.sparse as sp
from sklearn.model_selection import train_test_split
import community as community_louvain
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import random
import os
from sklearn.metrics import roc_auc_score

# Seed for the random splits; None leaves numpy's RNG unseeded (nondeterministic).
# NOTE(review): only referenced inside the commented-out loop at the bottom of
# this file, so it currently has no effect on the generated splits.
RANDOM_SEED = None


# Convert sparse matrix to tuple
def sparse_to_tuple(sparse_mx):
    """Decompose a scipy sparse matrix into (coords, values, shape).

    Returns the (nnz, 2) array of (row, col) coordinates, the matching
    nonzero values, and the matrix shape.
    """
    coo = sparse_mx if sp.isspmatrix_coo(sparse_mx) else sparse_mx.tocoo()
    indices = np.vstack((coo.row, coo.col)).T
    return indices, coo.data, coo.shape


# Get normalized adjacency matrix: A_norm
def preprocess_graph(adj):
    """Symmetrically normalize the adjacency matrix with self-loops.

    Computes D^{-1/2} (A + I)^T D^{-1/2}, where D is the degree matrix of
    A + I, and returns it in (coords, values, shape) tuple form.
    """
    a = sp.coo_matrix(adj)
    a_hat = a + sp.eye(a.shape[0])  # add self-loops
    inv_sqrt_deg = np.power(np.asarray(a_hat.sum(axis=1)).ravel(), -0.5)
    d_inv_sqrt = sp.diags(inv_sqrt_deg)
    a_norm = a_hat.dot(d_inv_sqrt).transpose().dot(d_inv_sqrt).tocoo()
    return sparse_to_tuple(a_norm)


# Prepare feed-dict for Tensorflow session
def construct_feed_dict(adj_normalized, adj, features, placeholders):
    """Map the model's placeholders to the actual input tensors.

    `placeholders` must contain the keys 'features', 'adj' and 'adj_orig'.
    """
    return {
        placeholders['features']: features,
        placeholders['adj']: adj_normalized,
        placeholders['adj_orig']: adj,
    }


# Perform train-test split.
# Takes an adjacency matrix in sparse format and returns:
#   adj_train, train_edges, train_edges_false, val_edges, val_edges_false,
#   test_edges, test_edges_false, num_dif_community, train_num_dif_community
def mask_test_edges(adj, test_frac=.1, val_frac=.05, prevent_disconnect=True, verbose=False, proportion=1,news_edges=[]):
    """Split graph edges into train/validation/test sets for link prediction.

    Edges are visited in random order and removed from the graph; each
    removal can be rolled back if it would disconnect a component.  Test
    edges are additionally partitioned into "weak" edges (those found in
    ``news_edges``, up to 1/3 of the test set) and "strong" edges.

    Args:
        adj: scipy sparse adjacency matrix (symmetric; diagonal is cleared).
        test_frac: fraction of edges hidden as positive test edges.
        val_frac: fraction of edges hidden as positive validation edges.
        prevent_disconnect: if True, never remove an edge whose removal
            would increase the number of connected components.
        verbose: print progress messages.
        proportion: multiplier on the number of false (negative) training
            edges sampled (train_size = num_train * proportion).
        news_edges: candidate "weak" edges for the test set.
            NOTE(review): mutable default argument; harmless here because
            the list is only read, never mutated.

    Returns:
        (adj_train, train_edges, train_edges_false, val_edges,
        val_edges_false, test_edges, test_edges_false, num_dif_community,
        train_num_dif_community).  Edge arrays list each edge only once,
        ordered (node1 < node2).
    """
    # NOTE: Splits are randomized and results might slightly deviate from reported numbers in the paper.

    if verbose:
        print('preprocessing...')

    # Remove diagonal elements (self-loops) from the adjacency matrix.
    print(type(adj))
    num_nodes = adj.shape[0]
    print(num_nodes)
    adj = adj - sp.dia_matrix((adj.diagonal()[np.newaxis, :], [0]), shape=adj.shape)
    adj.eliminate_zeros()
    # Check that diag is zero:
    assert np.diag(adj.todense()).sum() == 0

    # NOTE(review): nx.from_scipy_sparse_matrix was removed in networkx 3.0
    # (renamed to from_scipy_sparse_array); this code requires networkx < 3.
    g = nx.from_scipy_sparse_matrix(adj)
    orig_num_cc = nx.number_connected_components(g)

    adj_triu = sp.triu(adj)  # upper triangular portion of adj matrix
    adj_tuple = sparse_to_tuple(adj_triu)  # (coords, values, shape), edges only 1 way
    edges = adj_tuple[0]  # all edges, listed only once (not 2 ways)
    # edges_all = sparse_to_tuple(adj)[0] # ALL edges (includes both ways)
    num_test = int(np.floor(edges.shape[0] * test_frac))  # controls how large the test set should be
    num_train = int(np.floor(edges.shape[0] * (1 - test_frac)))
    num_val = int(np.floor(edges.shape[0] * val_frac))  # controls how large the validation set should be
    print("edgenum-{}".format(edges.shape[0]))

    # Store edges in list of ordered tuples (node1, node2) where node1 < node2
    edge_tuples = [(min(edge[0], edge[1]), max(edge[0], edge[1])) for edge in edges]
    all_edge_tuples = set(edge_tuples)
    train_edges = set(edge_tuples)  # initialize train_edges to have all edges
    test_edges = set()
    val_edges = set()

    if verbose == True:
        print('generating test/val sets...')

    # Iterate over shuffled edges, add to train/val sets.
    # One third of the test set is reserved for "weak" edges drawn from
    # news_edges; the remainder are "strong" edges.
    np.random.shuffle(edge_tuples)
    num_weak_test = int(num_test/3)
    weak_test_edges = []
    strong_test_edges = []
    weak_count = 0
    strong_count = 0
    for edge in edge_tuples:
        # print edge
        node1 = edge[0]
        node2 = edge[1]

        # If removing edge would disconnect a connected component, backtrack and move on
        # NOTE(review): when test/val are already full but the weak quota is
        # not, edges keep being removed from g here without being re-added or
        # assigned to any set — g can end up smaller than train_edges implies.
        g.remove_edge(node1, node2)
        if prevent_disconnect == True:
            if nx.number_connected_components(g) > orig_num_cc:
                g.add_edge(node1, node2)
                continue

        # Classify the removed edge as weak (appears in news_edges) or strong,
        # respecting each quota.
        # NOTE(review): assumes news_edges entries use the same (min, max)
        # tuple format as edge — confirm against the caller's pickle.
        if edge in news_edges and weak_count<num_weak_test:
            weak_test_edges.append(edge)
            weak_count = weak_count+1
        elif strong_count<num_test-num_weak_test:
            strong_test_edges.append(edge)
            strong_count = strong_count+1

        # Fill test_edges first.
        # NOTE(review): this test_edges set is discarded below (the returned
        # test set is weak_test_edges + strong_test_edges), but it still
        # drives removals from train_edges — so the final test set and
        # train_edges may overlap or disagree when the two fills diverge.
        if len(test_edges) < num_test:
            test_edges.add(edge)
            train_edges.remove(edge)

        # Then, fill val_edges
        elif len(val_edges) < num_val:
            val_edges.add(edge)
            train_edges.remove(edge)

        # Both edge lists full and weak quota met --> break loop
        elif len(test_edges) == num_test and len(val_edges) == num_val and len(weak_test_edges) == num_weak_test:
            break

    # Recompute the weak quota from the number of test edges actually obtained.
    num_test_edges = len(test_edges)
    num_weak_test_edges = int(num_test_edges / 3)
    if (len(val_edges) < num_val or len(test_edges) < num_test):
        print("WARNING: not enough removable edges to perform full train-test split!")
        print("Num. (test, val) edges requested: (", num_test, ", ", num_val, ")")
        print("Num. (test, val) edges returned: (", len(test_edges), ", ", len(val_edges), ")")

    if prevent_disconnect:
        assert nx.number_connected_components(g) == orig_num_cc

    if verbose:
        print('creating false test edges...')

    # G = g
    # Dense 0/1 adjacency of the ORIGINAL graph, used only for the non-edge
    # scan below.  The [[0]*n]*n aliasing is harmless here because np.array
    # copies the rows before any mutation.
    adj_train = [[0]*num_nodes]*num_nodes
    adj_train = np.array(adj_train)
    for edge in all_edge_tuples:
        adj_train[edge[0]][edge[1]] = adj_train[edge[1]][edge[0]] = 1

    # adj_D = [[0] * num_nodes] * num_nodes
    # adj_D = np.array(adj_D)
    # for i in range(num_nodes):
    #     adj_D[i][i] = np.sum(adj_train[i])
    # L = adj_D - adj_train
    # Lp = np.linalg.pinv(L)
    # N = [[1.0] * num_nodes] * num_nodes
    # N = np.array(N)
    # for i in range(num_nodes):
    #     for j in range(i+1, num_nodes):
    #         N[i][j] = N[j][i] = Lp[i][i] + Lp[j][j] - 2 * Lp[i][j]
    # print(ACT)


    # cg = ig.Graph(train_edges)
    # community = cg.community_infomap()
    # node_community = [0] * num_nodes
    # for i in range(len(community)):
    #     for v in community[i]:
    #         node_community[v] = i

    same_community = []
    dif_community = []
    # for edge in test_edges:
    #     if N[edge[0]][edge[1]] < 0.4:
    #         same_community.append(edge)
    #     else:
    #         dif_community.append(edge)

    # test_edges = list(test_edges)
    # test_ACT = [0] * num_test_edges
    # for i in range(num_test_edges):
    #     test_ACT[i] = N[test_edges[i][0]][test_edges[i][1]]
    #
    # test_ACT = np.array(test_ACT)
    #
    # # Take the test edges with the largest ACT values: their indices mark the weak links
    # weak_id = test_ACT.argsort()[::-1][0:num_weak_test_edges]
    # weak_id = set(weak_id)
    #
    #
    # for i in range(num_test_edges):
    #     if i in weak_id:
    #         weak_test_edges.append(test_edges[i])
    #     else:
    #         strong_test_edges.append(test_edges[i])

    # The returned test set is the weak + strong classification, not the
    # test_edges set built in the loop above.
    test_edges = weak_test_edges + strong_test_edges

    num_dif_community = num_weak_test_edges
    # '弱连接数量' means "number of weak links".
    print('弱连接数量', num_dif_community)
    # sample_same_community = random.sample(same_community, num_dif_community)
    # test_edges = dif_community + same_community

    # train_same_community = []
    # train_dif_community = []
    # for edge in train_edges:
    #     if N[edge[0]][edge[1]] < 0.4:
    #         train_same_community.append(edge)
    #     else:
    #         train_dif_community.append(edge)

    # Weak-link count for the training set; currently always 0 (the
    # community-based computation above is commented out).
    train_num_dif_community = 0
    # train_edges = train_dif_community + train_same_community


    # print(partition)
    #
    # # draw the graph
    # pos = nx.spring_layout(G)
    # # color the nodes according to their partition
    # cmap = cm.get_cmap('viridis', max(partition.values()) + 1)
    # nx.draw_networkx_nodes(G, pos, partition.keys(), node_size=40,
    #                        cmap=cmap, node_color=list(partition.values()))
    # nx.draw_networkx_edges(G, pos, alpha=0.5)
    # plt.show()

    # test_edges_false = set()
    # Enumerate EVERY non-edge of the original graph.
    # NOTE(review): O(num_nodes^2) time and memory — fine for small graphs,
    # prohibitive for large ones.
    false_edges_num = 0
    all_edge_false = list()
    for idx_i in range(num_nodes):
        for idx_j in range(idx_i + 1, num_nodes):
            # Unreachable: idx_j starts at idx_i + 1, so they can never be equal.
            if idx_i == idx_j:
                continue

            false_edge = (idx_i,idx_j)
            if false_edge in all_edge_tuples:
                continue

            all_edge_false.append(false_edge)
            false_edges_num = false_edges_num+1
    # all_edge_false = list(tuple(l) for l in all_edge_false)
    # Randomly split the non-edges: num_train * proportion of them become
    # negative training edges, the rest negative test edges.
    train_edges_false, test_edges_false = train_test_split(all_edge_false, train_size=num_train * proportion)
    print("train_edges_false-{}".format(len(train_edges_false)))
    print("test_edges_false-{}".format(len(test_edges_false)))
    # test_edges_false = all_edge_false
    # test_edge_labels = np.concatenate([np.ones(len(test_edges)), np.zeros(len(test_edges_false))])
    # test_preds = []
    # for edge in test_edges:
    #     test_preds.append(ACT[edge[0]][edge[1]])
    #
    # for edge in test_edges_false:
    #     test_preds.append(ACT[edge[0]][edge[1]])
    # n2v_test_roc = roc_auc_score(test_edge_labels, test_preds)
    # print(n2v_test_roc)
    # os.system("pause")
    # while len(test_edges_false) < num_test:
    #     idx_i = np.random.randint(0, adj.shape[0])
    #     idx_j = np.random.randint(0, adj.shape[0])
    #     if idx_i == idx_j:
    #         continue
    #
    #     false_edge = (min(idx_i, idx_j), max(idx_i, idx_j))
    #
    #     # Make sure false_edge not an actual edge, and not a repeat
    #     if false_edge in all_edge_tuples:
    #         continue
    #     if false_edge in test_edges_false:
    #         continue
    #
    #     test_edges_false.add(false_edge)

    if verbose:
        print('creating false val edges...')

    # Rejection-sample negative validation edges.
    # NOTE(review): test_edges_false is a list here, so each membership test
    # is O(n); slow when num_val is large.
    val_edges_false = set()
    while len(val_edges_false) < num_val:
        idx_i = np.random.randint(0, adj.shape[0])
        idx_j = np.random.randint(0, adj.shape[0])
        if idx_i == idx_j:
            continue

        false_edge = (min(idx_i, idx_j), max(idx_i, idx_j))

        # Make sure false_edge is not an actual edge, not in test_edges_false, not a repeat
        if false_edge in all_edge_tuples or \
                false_edge in test_edges_false or \
                false_edge in val_edges_false:
            continue

        val_edges_false.add(false_edge)

    if verbose:
        print('creating false train edges...')


    if verbose:
        print('final checks for disjointness...')

    # # assert: false_edges are actually false (not in all_edge_tuples)
    # assert test_edges_false.isdisjoint(all_edge_tuples)
    # assert val_edges_false.isdisjoint(all_edge_tuples)
    # assert train_edges_false.isdisjoint(all_edge_tuples)
    #
    # # assert: test, val, train false edges disjoint
    # assert test_edges_false.isdisjoint(val_edges_false)
    # assert test_edges_false.isdisjoint(train_edges_false)
    # assert val_edges_false.isdisjoint(train_edges_false)
    #
    # # assert: test, val, train positive edges disjoint
    # assert val_edges.isdisjoint(train_edges)
    # assert test_edges.isdisjoint(train_edges)
    # assert val_edges.isdisjoint(test_edges)

    if verbose:
        print('creating adj_train...')

    # Re-build adj matrix using remaining graph (overwrites the dense
    # adj_train computed earlier).
    adj_train = nx.adjacency_matrix(g)

    # Convert edge-lists to numpy arrays
    train_edges = np.array([list(edge_tuple) for edge_tuple in train_edges])
    train_edges_false = np.array([list(edge_tuple) for edge_tuple in train_edges_false])
    val_edges = np.array([list(edge_tuple) for edge_tuple in val_edges])
    val_edges_false = np.array([list(edge_tuple) for edge_tuple in val_edges_false])
    test_edges = np.array([list(edge_tuple) for edge_tuple in test_edges])
    test_edges_false = np.array([list(edge_tuple) for edge_tuple in test_edges_false])
    print(train_edges.shape)
    print(train_edges_false.shape)
    print(test_edges.shape)
    print(test_edges_false.shape)


    if verbose:
        print('Done with train-test split!')
        print('')

    # NOTE: these edge lists only contain single direction of edge!
    return adj_train, train_edges, train_edges_false, \
           val_edges, val_edges_false, test_edges, test_edges_false, num_dif_community, train_num_dif_community


# ---------- Generate Train-Test Splits ---------- #
# Fractions of edges to hide from the training graph (split between test and val).
FRAC_EDGES_HIDDEN = [0.1]

# Dataset identifier; used to build the input pickle path and output folder/file names.
DEAL_NAME = 'grid_2d-200-200'

if __name__ == '__main__':
    # Load the pickled (adjacency matrix, news-edge list) pair for this dataset.
    # NOTE(review): hard-coded Windows path; adjust for other machines.
    combined_dir = 'D:/data-processed/{}-adj.pkl'.format(DEAL_NAME)
    with open(combined_dir, 'rb') as f:
        data = pickle.load(f)
    adj, news_edges = data

    # For each negative-sampling proportion j, generate 10 random splits per
    # hidden-edge fraction and pickle each split to its own file.
    for j in range(1, 2):
        TRAIN_TEST_SPLITS_FOLDER = 'D:/data/splited/ACT13_sample/1_{}/{}/'.format(j, DEAL_NAME)
        # Fix: exist_ok=True so re-running the script does not raise
        # FileExistsError when the output folder already exists.
        os.makedirs(TRAIN_TEST_SPLITS_FOLDER, exist_ok=True)
        for i in range(10):
            for frac_hidden in FRAC_EDGES_HIDDEN:
                # All hidden edges go to the test set (no validation edges).
                val_frac = 0
                test_frac = frac_hidden - val_frac
                current_hidden = '{}-{}-{}-hidden'.format(DEAL_NAME, frac_hidden, i)

                # Run the randomized split on the current graph.
                train_test_splits = mask_test_edges(
                    adj, test_frac=test_frac, val_frac=val_frac, verbose=True,
                    proportion=j, news_edges=news_edges)

                file_name = TRAIN_TEST_SPLITS_FOLDER + current_hidden + '.pkl'

                # Save the split; protocol 2 keeps the pickle loadable from Python 2.
                with open(file_name, 'wb') as f:
                    pickle.dump(train_test_splits, f, protocol=2)