import torch
import numpy as np
import torch.nn as nn
from model_gtn import GTN
from model_fastgtn import FastGTNs
import pickle
import argparse
from torch_geometric.utils import add_self_loops
from sklearn.metrics import f1_score as sk_f1_score
from utils import f1_score
from utils import init_seed, _norm
import copy
import scipy.sparse as sps
import numpy as np
from scipy.sparse import csr_matrix


def csr_equal(matrix1, matrix2):
    """Compare two scipy CSR matrices for exact structural equality.

    Parameters
    ----------
    matrix1, matrix2 : scipy.sparse.csr_matrix
        Matrices to compare.

    Returns
    -------
    (bool, str)
        Whether the matrices are identical, plus a human-readable reason.

    NOTE(review): this compares the raw ``data``/``indices``/``indptr``
    buffers, so two mathematically equal matrices can compare unequal if
    either is not in canonical form (unsorted indices, duplicate entries,
    explicit zeros) — confirm inputs are canonical if that matters.
    """
    # Fast reject: different shape or different number of stored elements.
    if matrix1.shape != matrix2.shape or matrix1.nnz != matrix2.nnz:
        return False, "Matrices do not have the same shape or number of non-zero elements"

    # Compare the three CSR buffers directly.
    data_equal = np.array_equal(matrix1.data, matrix2.data)
    indices_equal = np.array_equal(matrix1.indices, matrix2.indices)
    indptr_equal = np.array_equal(matrix1.indptr, matrix2.indptr)

    if data_equal and indices_equal and indptr_equal:
        return True, "Matrices are equal"

    if not data_equal:
        # Same sparsity structure but differing values: report how many
        # stored entries disagree (nnz matches, so the arrays align 1:1).
        num_discrepancies = np.sum(matrix1.data != matrix2.data)
        return False, f"Number of discrepancies in data: {num_discrepancies}"

    # Differing indices/indptr means the sparsity structure itself differs,
    # so a per-entry discrepancy count would be meaningless.
    return False, "Matrices have different structures"


if __name__ == '__main__':
    # Inspect the locally re-saved GTN dataset pickles (node features, edge
    # adjacency matrices, labels) and print basic shape/sparsity statistics.
    init_seed(seed=777)

    parser = argparse.ArgumentParser()
    parser.add_argument('--model', type=str, default='GTN',
                        help='Model')
    parser.add_argument('--dataset', type=str, default="ACM",
                        help='Dataset')
    parser.add_argument('--epoch', type=int, default=200,
                        help='Training Epochs')
    parser.add_argument('--node_dim', type=int, default=64,
                        help='hidden dimensions')
    parser.add_argument('--num_channels', type=int, default=2,
                        help='number of channels')
    parser.add_argument('--lr', type=float, default=0.01,
                        help='learning rate')
    parser.add_argument('--weight_decay', type=float, default=0.001,
                        help='l2 reg')
    parser.add_argument('--num_layers', type=int, default=1,
                        help='number of GT/FastGT layers')
    parser.add_argument('--runs', type=int, default=10,
                        help='number of runs')
    parser.add_argument("--channel_agg", type=str, default='concat')
    parser.add_argument("--remove_self_loops", action='store_true', help="remove_self_loops")
    # Configurations for FastGTNs
    parser.add_argument("--non_local", action='store_true', help="use non local operations")
    parser.add_argument("--non_local_weight", type=float, default=0,
                        help="weight initialization for non local operations")
    parser.add_argument("--beta", type=float, default=0, help="beta (Identity matrix)")
    parser.add_argument('--K', type=int, default=1,
                        help='number of non-local negibors')
    parser.add_argument("--pre_train", action='store_true', help="pre-training FastGT layers")
    parser.add_argument('--num_FastGTN_layers', type=int, default=1,
                        help='number of FastGTN layers')

    args = parser.parse_args()
    print(args)

    # NOTE(review): these hyper-parameters are unpacked but never used in the
    # visible portion of this script; kept in case later code relies on them.
    epochs = args.epoch
    node_dim = args.node_dim
    num_channels = args.num_channels
    lr = args.lr
    weight_decay = args.weight_decay
    num_layers = args.num_layers

    # Paths of the locally cached dataset pickles, produced by an earlier
    # dump step (since removed from this script).
    node_features_save_path = '01_node_features.pkl'
    edges_save_path = '02_edges.pkl'
    labels_save_path = '03_labels.pkl'

    # SECURITY: pickle.load can execute arbitrary code embedded in the file —
    # only load pickles produced by a trusted source (here: this project itself).
    with open(node_features_save_path, 'rb') as f:
        node_features = pickle.load(f)
    with open(edges_save_path, 'rb') as f:
        edges = pickle.load(f)
    with open(labels_save_path, 'rb') as f:
        labels = pickle.load(f)

    # `edges` is indexed as a sequence of sparse adjacency matrices, one per
    # edge type — presumably the ACM meta-path relations AP/PA/SP/PS, each
    # covering the full node set, with nnz entries in its own sub-block
    # (TODO confirm against the dataset dump).
    print(node_features.shape)
    print("edges len", len(edges))
    print("AP", edges[0].shape, edges[0].nnz)
    print("PA", edges[1].shape, edges[1].nnz)
    print("SP", edges[2].shape, edges[2].nnz)
    print("PS", edges[3].shape, edges[3].nnz)
    print( edges[3])