# src/matrix/compute_matirx_feature.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import scipy.sparse as sparse
from scipy.sparse import csr_matrix, coo_matrix, load_npz, save_npz
from scipy.sparse.linalg import eigs
import time
import sys
import pandas as pd
import numpy as np
import os
import json
import argparse
#+++++++only for test++++++++++
import matplotlib.pyplot as plt
#++++++++++++++++++++++++++++++
import seaborn as sns

from multiprocessing import Pool
from multiprocessing import Manager

from matirx_utility import read_data

def make_parser():
    """Build the command-line parser for the feature-computation script.

    Returns:
        argparse.ArgumentParser: parser with two positional arguments
        (matrix directory, save directory) and optional process count
        and turning point.
    """
    parser = argparse.ArgumentParser(
        prog="Compute matrix feature",
        description="Compute matrix feature.")
    parser.add_argument(
            "MatDIR",
            metavar="MatrixDIR",
            help="Destination matrix files."
        )
    parser.add_argument(
            "SaveDIR",
            metavar="SaveFeatureDIR",
            help="Destination Save matrix feature."
        )
    parser.add_argument(
            "-p", "--proc",
            dest="num_proc",
            type=int,
            # Original default was os.cpu_count()/2: a float, and a
            # TypeError when cpu_count() returns None. Use integer
            # division with a floor of 1 worker.
            default=max(1, (os.cpu_count() or 2) // 2),
            help="Number of processes used to run the task, default:%(default)s"
        )
    parser.add_argument(
            "-tp", "--turningpoint",
            dest="turn_point",
            type=int,
            default=1,
            help="Turning point of matrix."
        )
    return parser

#only for test
def plot_matrix_data(val, refine, title="", file="matrix"):
    """Plot a histogram of matrix data (debug/diagnostic helper).

    Args:
        val: sequence of values to histogram (e.g. nnz per row).
        refine: refinement level; appended to the title and file name.
        title: plot title prefix.
        file: output file stem; the PNG is written to ./<file>_<refine>.png.
    """
    plt.figure(figsize=(20, 20))
    sns.histplot(data = val, kde=False, discrete=True, binwidth=1, stat="percent")
    plt.xlabel("nnz per row")
    plt.title(title + " (refine = %d)" % refine)
    #file_path = "plot/" + file + "_%d.png" % refine
    file_path = "./" + file + "_%d.png" % refine
    plt.savefig(file_path, dpi = 500, bbox_inches='tight')
    # Close the figure so repeated calls do not accumulate open figures
    # (matplotlib keeps every figure alive until explicitly closed).
    plt.close()






def get_mat_normal_feature(my_matirx, feature_dict, matrix_path, save_path):
    """Compute structural features of a sparse matrix and log a report.

    The report (size, nnz, sparsity, diagonal/off-diagonal statistics,
    diagonal dominance, Z-property, per-row value ratios, largest
    eigenvalues) is written to ``<save_path>/<matrix name>_feature_new.log``
    and the scalar features are stored into ``feature_dict``.

    Args:
        my_matirx: square sparse matrix; indptr/indices/data are read
            directly, so CSR format is assumed — TODO confirm callers
            always pass CSR (read_data's return type is not visible here).
        feature_dict: dict to fill with the computed features.
        matrix_path: matrix file path; only the base name is used to name
            the log file.
        save_path: directory where the log file is written.

    Returns:
        The same ``feature_dict``, updated in place.
    """
    n    = my_matirx.shape[0]
    nnz  = my_matirx.nnz
    sparsity = float(nnz) / float(n*n)
    nnz_per_row_ave = nnz / n

    matrix = my_matirx
    size = matrix.shape[0]
    num_nonzeros = matrix.count_nonzero()

    filename = os.path.split(matrix_path)[1]
    file = filename
    print("size of matrix %s: %d" % (file, size))
    print("nnz of matrix %s : %d\n" % (file, num_nonzeros))

    # Five eigenvalues of largest magnitude (ARPACK); the last entry is
    # the one reported below.
    max_eigenvalue = eigs(matrix, k=5, return_eigenvectors=False, which='LM')

    diag = matrix.diagonal()
    diag_max = max(diag)
    diag_min = min(diag)
    num_diag_pos = int((diag > 0).sum())
    num_diag_neg = int((diag < 0).sum())

    nnz_per_row = matrix.getnnz(axis=1)
    nnz_per_row_max = max(nnz_per_row)
    nnz_per_row_min = min(nnz_per_row)

    # Histogram of nnz-per-row with unit-width, left-closed bins.
    bins_lower = int(np.floor(nnz_per_row_min))
    bins_upper = int(np.ceil(nnz_per_row_max))
    bins_nnz = list(range(bins_lower, bins_upper + 2))
    nnz_per_row_range = pd.cut(nnz_per_row, bins_nnz, right=False)
    # Top-level pd.value_counts is deprecated; Series.value_counts with
    # sort=False yields identical counts in bin (category) order.
    nnz_per_row_range_count = pd.Series(nnz_per_row_range).value_counts(sort=False)
    if (nnz_per_row_range_count.sum() == size):
        print("nnz per row range count success!")
    else:
        print("nnz per row range count = %d, nnz = %d" % (nnz_per_row_range_count.sum(), size))

    ## Off diagonal: subtract the diagonal so only off-diagonal entries
    ## remain (scipy's sparse subtraction drops the resulting zeros).
    zero_diag_matrix = matrix - coo_matrix((diag, (range(size), range(size))), shape=(size, size))

    offdiag_max = max(abs(zero_diag_matrix.data))
    offdiag_min = min(abs(zero_diag_matrix.data))
    offdiag_min_act = min(zero_diag_matrix.data)

    row_ptr = zero_diag_matrix.indptr
    # off-diag sign counters
    num_offdiag_pos = 0
    num_offdiag_neg = 0
    # diagonal dominance counters (strict and non-strict)
    num_diag_dominant = 0
    num_diag_non_dominant = 0
    num_equal_diag_dominant = 0
    num_equal_diag_non_dominant = 0
    # rows with all off-diagonal entries <= 0 -- Z property
    num_Z_row = 0
    num_dd_Z_row = 0
    # NOTE(review): "bandwidth" here is the max column spread among the
    # off-diagonal entries of a row, not the classic |i - j| bandwidth.
    max_bandwidth = 0

    for i in range(len(row_ptr)-1):
        # Was named `sum`, shadowing the builtin.
        row_abs_sum = 0
        Z_flag = 0
        row_cols = zero_diag_matrix.indices[row_ptr[i]:row_ptr[i+1]]
        # Guard: a row whose only stored entry was the diagonal has no
        # off-diagonal columns; max()/min() on an empty slice would raise.
        if len(row_cols) > 0:
            tmp = max(row_cols) - min(row_cols)
            if (tmp > max_bandwidth):
                max_bandwidth = tmp

        for j in range(row_ptr[i], row_ptr[i+1]):
            if (zero_diag_matrix.data[j] > 0):
                Z_flag = 1
                num_offdiag_pos = num_offdiag_pos + 1
            else:
                num_offdiag_neg = num_offdiag_neg + 1
            row_abs_sum = row_abs_sum + abs(zero_diag_matrix.data[j])

        if diag[i] > row_abs_sum:
            num_diag_dominant = num_diag_dominant + 1
        else:
            num_diag_non_dominant = num_diag_non_dominant + 1

        if diag[i] >= row_abs_sum:
            num_equal_diag_dominant = num_equal_diag_dominant + 1
        else:
            num_equal_diag_non_dominant = num_equal_diag_non_dominant + 1

        if Z_flag == 0:
            num_Z_row = num_Z_row + 1
            if diag[i] >= row_abs_sum:
                num_dd_Z_row = num_dd_Z_row + 1

    ## Multiscale: per-row ratio of largest to smallest absolute value.
    ## BUG FIX: the original sliced matrix.data with zero_diag_matrix's
    ## indptr; the two matrices have different sparsity patterns, so the
    ## slices were misaligned. Use the matrix's own indptr.
    mat_row_ptr = matrix.indptr
    ratio_max = 0
    ratio_min = 0xFFFFFFF
    ratio_row = []
    for i in range(len(mat_row_ptr)-1):
        row_data = matrix.data[mat_row_ptr[i]:mat_row_ptr[i+1]]
        max_row_data = max(abs(row_data))
        min_row_data = min(abs(row_data))
        ratio = max_row_data / min_row_data
        ratio_row.append(ratio)
        if (ratio > ratio_max):
            ratio_max = ratio
        if (ratio < ratio_min):
            ratio_min = ratio

    # Unit-width bins over log10(ratio). Guard against ratio_max == 1
    # (all rows uniform), where ceil(log10) == 0 would leave a single
    # bin edge and make pd.cut raise.
    bins_upper = max(int(np.ceil(np.log10(ratio_max))), 1)
    bins_ratio = list(range(0, bins_upper + 1))
    ratio_row_range = pd.cut(np.log10(ratio_row), bins_ratio, right=False)
    ratio_row_range_count = pd.Series(ratio_row_range).value_counts(sort=False)
    if (ratio_row_range_count.sum() == size):
        print("ratio row range count success!\n")
    else:
        print("ratio row range count = %d, size = %d" % (ratio_row_range_count.sum(), size))

    ## Representative multiscale row: first row whose log10(ratio) falls
    ## in the top bin; fall back to row 0 when no row qualifies.
    candidates = np.where(np.log10(ratio_row) > (bins_upper - 1))[0]
    row = int(candidates[0]) if len(candidates) > 0 else 0
    data_row = matrix.data[mat_row_ptr[row]:mat_row_ptr[row+1]]
    max_data_row = max(abs(data_row))
    min_data_row = min(abs(data_row))

    ## Write the report to <save_path>/<filename>_feature_new.log.
    ## The original leaked the file handle and left stdout redirected if
    ## any print raised; `with` + try/finally fixes both.
    temp = sys.stdout
    jsonpath = os.path.join(save_path, filename)
    output_file = '{0}_feature_new.log'.format(jsonpath)
    try:
        with open(output_file, 'w') as file_ptr:
            sys.stdout = file_ptr
            print("==============================================")
            print("size of matrix:".ljust(30) + "\t %d" % size)
            feature_dict["size"] = size
            print("num of nonzeors:".ljust(30) + "\t %d" % num_nonzeros)
            # int() so numpy integers never break json serialization.
            feature_dict["nnz"] = int(num_nonzeros)
            print("sparsity".ljust(30) + "\t %e" % sparsity)
            feature_dict["sparsity"] = sparsity
            print("max nnz per row:".ljust(30) + "\t %d" % nnz_per_row_max)
            feature_dict["nnz_per_row_max"] = int(nnz_per_row_max)
            print("min nnz per row:".ljust(30) + "\t %d" % nnz_per_row_min)
            feature_dict["nnz_per_row_min"] = int(nnz_per_row_min)
            print("ave nnz per row:".ljust(30) + "\t %e" % nnz_per_row_ave)
            feature_dict["nnz_per_row_ave"] = nnz_per_row_ave
            print("max bandwidth:".ljust(30) + "\t %d" % max_bandwidth)
            feature_dict["max_bandwidth"] = int(max_bandwidth)
            print("----------------------------------------------")
            print("num of positive diag:".ljust(30) + "\t %d" % num_diag_pos)
            feature_dict["num_diag_pos"] = int(num_diag_pos)
            print("num of negative diag:".ljust(30) + "\t %d" % num_diag_neg)
            feature_dict["num_diag_neg"] = int(num_diag_neg)
            print("max diag value:".ljust(30) + "\t %.4e" % diag_max)
            feature_dict["diag_max"] = diag_max
            print("min diag value:".ljust(30) + "\t %.4e" % diag_min)
            feature_dict["diag_min"] = diag_min
            print("----------------------------------------------")
            print("num of positive off-diag:".ljust(30) + "\t %d" % num_offdiag_pos)
            feature_dict["num_offdiag_pos"] = int(num_offdiag_pos)
            print("num of negative off-diag:".ljust(30) + "\t %d" % num_offdiag_neg)
            feature_dict["num_offdiag_neg"] = int(num_offdiag_neg)
            print("max off-diag value(abs):".ljust(30) + "\t %.4e" % offdiag_max)
            feature_dict["offdiag_max(abs)"] = offdiag_max
            print("min off-diag value(abs):".ljust(30) + "\t %.4e" % offdiag_min)
            feature_dict["offdiag_min(abs)"] = offdiag_min
            print("min off-diag value:".ljust(30) + "\t %.4e" % offdiag_min_act)
            feature_dict["offdiag_min"] = offdiag_min_act
            print("----------------------------------------------")
            print("num of rows(diag dominant):".ljust(30) + "\t %d (%.2f%%)" % (num_diag_dominant , num_diag_dominant/size*100))
            print("num of rows(diag non-dominant):".ljust(30) + "\t %d" % num_diag_non_dominant)
            print("num of rows(Z-property):".ljust(30) + "\t %d " % (num_Z_row))
            print("num of rows(dd and Z-property):".ljust(30) + "\t %d" % (num_dd_Z_row))
            print("----------------------------------------------")
            print("max ratio of row value:".ljust(30) + "\t %.4e" % ratio_max)
            print("    ------------------------------------------")
            print("    max value of the row (abs):".ljust(25) + "\t %.4e" % max_data_row)
            print("    min value of the row (abs):".ljust(25) + "\t %.4e" % min_data_row)
            print("    ------------------------------------------")
            print("min ratio of row value:".ljust(30) + "\t %.4e" % ratio_min)
            print("----------------------------------------------")
            print("max eigenvalue:".ljust(30) + "\t %.2e + %.2e i" % (max_eigenvalue[-1].real, max_eigenvalue[-1].imag))
            print("==============================================")

            ## Ratio of value per row (histogram table)
            print("\n=====================================================")
            print("range of ratio".ljust(20) + "|\tcount \t percentage  ")
            print("-----------------------------------------------------")
            for i in range(len(ratio_row_range_count)):
                range_str = "[1.0E+%2d , 1.0E+%2d)" % (bins_ratio[i], bins_ratio[i+1])
                # .iloc: positional Series[int] indexing is deprecated.
                print(range_str.ljust(20) + "|\t%6d \t" % (ratio_row_range_count.iloc[i]) + ("%2.2f%%" % (ratio_row_range_count.iloc[i]/size*100)).rjust(8))
            print("-----------------------------------------------------")
            range_str = "[1.0E+%2d , 1.0E+%2d)" % (bins_ratio[0], bins_ratio[-1])
            print(range_str.ljust(15) + "|\t%6d \t" % (ratio_row_range_count.sum()) + ("100%%".rjust(8)))
            print("=====================================================\n")

            ## nnz per row (histogram table)
            print("\n=====================================================")
            print("nnz per row".ljust(15) + "|\tcount \t percentage  ")
            print("-----------------------------------------------------")
            for i in range(len(nnz_per_row_range_count)):
                range_str = "[%2d , %2d)" % (bins_nnz[i], bins_nnz[i+1])
                print(range_str.ljust(15) + "|\t%6d \t" % (nnz_per_row_range_count.iloc[i]) + ("%2.2f%%" % (nnz_per_row_range_count.iloc[i]/size*100)).rjust(8))
            print("-----------------------------------------------------")
            range_str = "[%2d , %2d)" % (bins_nnz[0], bins_nnz[-1])
            print(range_str.ljust(15) + "|\t%6d \t" % (nnz_per_row_range_count.sum()) + ("100%%".rjust(8)))
            print("=====================================================\n")
    finally:
        # Always restore stdout, even if formatting fails midway.
        sys.stdout = temp

    #use this func to plot data pic.
    #plot_matrix_data(nnz_per_row, refine, "nnz per row", "%s_nnz" % file)

    return feature_dict

def compute_diag_average_length(i, rowptr, col):
    """Sum and maximum of the distances |col - i| for one matrix row.

    The caller passes pre-sliced CSR arrays: ``rowptr`` is the two-entry
    slice ``indptr[i:i+2]`` and ``col`` holds the column indices of row i.

    Args:
        i: row index.
        rowptr: two-element slice of the CSR indptr for row i.
        col: column indices of row i.

    Returns:
        tuple: (sum of distances to the diagonal, maximum distance);
        (0, 0) for a structurally empty row.
    """
    num_entries = rowptr[1] - rowptr[0]
    # abs() once per entry; the original applied abs twice redundantly.
    distances = [abs(col[k] - i) for k in range(num_entries)]
    if not distances:
        return (0, 0)
    return (sum(distances), max(distances))

def get_diag_average_length(nprocs, turn_point, my_matirx, feature_dict):
    """Compute average/maximum distance of the nonzeros from the diagonal.

    One task per row is farmed out to a process pool; the per-row
    (sum, max) results are reduced and several normalized variants are
    stored into ``feature_dict``.

    Args:
        nprocs: number of worker processes.
        turn_point: scaling factor for the "turning_point" feature.
        my_matirx: sparse matrix in CSR format (indptr/indices are read).
        feature_dict: dict to fill.

    Returns:
        The same ``feature_dict``, updated in place.
    """
    n = my_matirx.shape[0]
    nnz = my_matirx.nnz
    rowptr = my_matirx.indptr
    col = my_matirx.indices

    # Context manager guarantees the pool is cleaned up even if a task
    # submission raises (the original leaked the pool in that case).
    async_results = []
    with Pool(nprocs) as pool:
        for i in range(n):
            async_results.append(pool.apply_async(
                func=compute_diag_average_length,
                args=(i, rowptr[i:i + 2], col[rowptr[i]:rowptr[i + 1]])))
        print(">> compute diag_average_length")
        pool.close()
        pool.join()

    # NOTE: the per-row results are also what a future matrix
    # redistribution step would need (kept from the original comment).
    result_sum = 0
    result_max = 0
    for res in async_results:
        row_sum, row_max = res.get()
        result_sum += row_sum
        result_max = max(result_max, row_max)

    # Cast numpy integers to plain ints so the dict is JSON-serializable.
    result_sum = int(result_sum)
    result_max = int(result_max)

    feature_dict["diag_average_length(max)"] = result_max
    feature_dict["diag_average_length(raw)"] = result_sum
    feature_dict["diag_average_length(nnz mean)"] = result_sum/(nnz)
    feature_dict["diag_average_length(nnz and n mean)"] = result_sum/(nnz * n)
    feature_dict["diag_average_length(turning_point)"] = result_sum/(nnz * n) * turn_point
    print(">> end")
    return feature_dict

def save2json(feature_dict, matrix_path, save_path):
    """Write the feature dict to <save_path>/<matrix name>_feature_new.json."""
    matrix_name = os.path.split(matrix_path)[1]
    target = os.path.join(save_path, matrix_name)
    savename = '{0}_feature_new.json'.format(target)
    with open(savename, 'w') as json_file:
        json.dump(feature_dict, json_file, indent=2, sort_keys=True)
    print("save into {0}".format(savename))

def main():
    """Entry point: parse CLI args, compute matrix features, save as JSON."""
    args = make_parser().parse_args()

    # Load the matrix once; both feature passes reuse it.
    my_matirx = read_data(args.MatDIR)

    features = {}
    features = get_mat_normal_feature(my_matirx, features, args.MatDIR, args.SaveDIR)
    features = get_diag_average_length(int(args.num_proc), int(args.turn_point),
                                       my_matirx, features)

    save2json(features, args.MatDIR, args.SaveDIR)

# def test():
#     matrix_path = "/home/matrix/solverchanllenge2021/solverchallenge21_01/jxpamg_matrix01"
#     my_matirx = read_data(matrix_path)

#     csr_mat = my_matirx.tocsr()
#     n    = csr_mat.shape[0]
#     nnz  = csr_mat.nnz
#     rowptr = csr_mat.indptr
#     col  = csr_mat.indices
#     value  = csr_mat.data

#     pool = Pool(int(os.cpu_count()/2))

#     #pool.map(partial(parrel_print, rowptr=rowptr,col=col,value=value), range(n))
    
#     for i in range(56):
#     # for i in range(n):
#         pool.apply_async(func=test_parrel_print, args=( i, rowptr[i:i+1+1], col[rowptr[i]:rowptr[i+1]], value[rowptr[i]:rowptr[i+1]] ) )
#     pool.close()
#     pool.join()

# def test_parrel_print(i, rowptr,col,value):
#     print(i, rowptr, col, value)

# Run only when executed as a script, not when imported as a module.
if __name__=="__main__":
    # test()
    main()
    #/home/dh/code/jxpamg/example/input/mat_20X20
