
import numpy as np
import numpy as np
from io import StringIO
from scipy.io import mmread
import os
import re
from glob import glob
import h5py


# def process_pointcloud(point_cloud):
#     # Input:
#     #   (N, 4)
#     # Output:
#     #   voxel_dict

#     scene_size = np.array([4, 40, 48], dtype = np.float32)
#     voxel_size = np.array([0.4, 0.2, 0.2], dtype = np.float32)
#     grid_size = np.array([10, 200, 240], dtype = np.int64)
#     lidar_coord = np.array([0, 20, 3], dtype = np.float32)
#     max_point_number = 45

#     np.random.shuffle(point_cloud)

#     shifted_coord = point_cloud[:, :3] + lidar_coord
#     # reverse the point cloud coordinate (X, Y, Z) -> (Z, Y, X)
#     voxel_index = np.floor(
#         shifted_coord[:, ::-1] / voxel_size).astype(np.int)

#     bound_x = np.logical_and(
#         voxel_index[:, 2] >= 0, voxel_index[:, 2] < grid_size[2])
#     bound_y = np.logical_and(
#         voxel_index[:, 1] >= 0, voxel_index[:, 1] < grid_size[1])
#     bound_z = np.logical_and(
#         voxel_index[:, 0] >= 0, voxel_index[:, 0] < grid_size[0])

#     bound_box = np.logical_and(np.logical_and(bound_x, bound_y), bound_z)

#     point_cloud = point_cloud[bound_box]
#     voxel_index = voxel_index[bound_box]

#     # [K, 3] coordinate buffer as described in the paper
#     coordinate_buffer = np.unique(voxel_index, axis = 0)

#     K = len(coordinate_buffer)
#     T = max_point_number

#     # [K, 1] store number of points in each voxel grid
#     number_buffer = np.zeros(shape = (K), dtype = np.int64)

#     # [K, T, 7] feature buffer as described in the paper
#     feature_buffer = np.zeros(shape = (K, T, 7), dtype = np.float32)

#     # build a reverse index for coordinate buffer
#     index_buffer = {}
#     for i in range(K):
#         index_buffer[tuple(coordinate_buffer[i])] = i

#     for voxel, point in zip(voxel_index, point_cloud):
#         index = index_buffer[tuple(voxel)]
#         number = number_buffer[index]
#         if number < T:
#             feature_buffer[index, number, :4] = point
#             number_buffer[index] += 1

#     feature_buffer[:, :, -3:] = feature_buffer[:, :, :3] - \
#         feature_buffer[:, :, :3].sum(axis = 1, keepdims = True)/number_buffer.reshape(K, 1, 1)

#     voxel_dict = {'feature_buffer': feature_buffer,
#                   'coordinate_buffer': coordinate_buffer,
#                   'number_buffer': number_buffer}

#     return voxel_dict

# def process_pointcloud(point_cloud):
    # Input:
    #   (N, 4)
    # Output:
    #   voxel_dict


    
####feature list
#   manual_feature
#   program_feature 
#   profiling_feature
#   dense_matrix
#   density_map
# nlpkkt200

'''
path_directory = '../data/matrix_suite_tar/'
density_size = 256

# data_size = (3 * 2868) // 4
# for fn in sorted(glob('data/matrix_suite_tar/*'))[data_size:]:
#     sparse_name = fn.split('/')[2]
#     print('#working on', sparse_name, '...')
    
#     filename_h5py = path_directory + f'{sparse_name}/' + sparse_name + ".h5py"
#     if os.path.exists(filename_h5py):
#         continue

# sparse_name_list = ['IG5-13', 'IG5-14', 'IG5-15', 'IG5-18', 'in-2004']
sparse_name_list = ['gen2', '176bit', 'bcsstk13', 'biplane-9', 't2d_q9', 'fv2', 'fv3', 'big_dual', 
                    'ch7-8-b3', 'luxembourg_osm', 'cond-mat-2003', 'astro-ph', 
                    'wing', 'TSOPF_RS_b39_c7', 'airfoil_2d', 'jan99jac120sc', 
                    '3D_28984_Tetra', 'rajat25', 'c-69', 'rajat16', 'Trec13', 
                    'Zd_Jac2_db', 'bibd_17_8', 'image_interp', 'net4-1', 'tsyl201', 
                    'matrix-new_3', 'ct20stif', 'stat96v2', 'belgium_osm', 'Chevron3', 'parabolic_fem']


                    # ['gen2', '176bit', 'bcsstk13', 'biplane-9', 
                    # 'ch7-8-b3', 'luxembourg_osm', 'cond-mat-2003', 'astro-ph', 
                    # '3D_28984_Tetra', 'rajat25', 'c-69', 'rajat16', 
                    # 'net4-1', 'tsyl201', 'matrix-new_3', 'ct20stif']

# manual_feature_all = []
# density_map_all = []

# nnz_number_all = []


for sparse_name in sparse_name_list:

    
    filename = path_directory + f'{sparse_name}/' + sparse_name + ".mtx"

    filename_out = path_directory + f'{sparse_name}/' + sparse_name + "_voxel.h5py"
    
    if (os.path.exists(filename_out)):
        print(filename_out, 'exist')
        # input()
        continue

    sparse_matrix = mmread(filename)
    # print(sparse_matrix.row)
    # print(sparse_matrix.col)
    # print(sparse_matrix.data)
    # print(sparse_matrix.shape)


    nnz_number = sparse_matrix.getnnz()
    row_num = sparse_matrix.shape[0]
    col_num = sparse_matrix.shape[1]
    print("name row col:", sparse_name, row_num, col_num)

    # nnz_per_row = np.zeros(row_num)
    # nnz_per_col = np.zeros(col_num)

    density_map = np.zeros((density_size, density_size))
    row_ratio = row_num / density_size
    col_ratio = col_num / density_size
    area_number = row_ratio * col_ratio


    for i in range(nnz_number):
        row_tmp = sparse_matrix.row[i]
        col_tmp = sparse_matrix.col[i]
        # nnz_per_col[col_tmp] += 1
        # nnz_per_row[row_tmp] += 1
        
        row_density = int(row_tmp // row_ratio)
        col_density = int(col_tmp // col_ratio)
        density_map[row_density][col_density] += 1
    
    density_local_map = density_map / area_number
    density_map = density_map / nnz_number
    
    
    # density_map_all.append(density_map)
        
    # max_nnz_per_row = max(nnz_per_row)
    # min_nnz_per_row = min(nnz_per_row)
    # mean_nnz_per_row = np.mean(nnz_per_row)
    # std_nnz_per_row = np.std(nnz_per_row)

    # max_nnz_per_col = max(nnz_per_col)
    # min_nnz_per_col = min(nnz_per_col)
    # mean_nnz_per_col = np.mean(nnz_per_col)
    # std_nnz_per_col = np.std(nnz_per_col)

    # nnz_number, row_num, col_num, row ratio, col ratio, mean/min/max_nnz per row/col
    # manual_feature = [nnz_number, row_num, col_num, 1 / row_ratio,  1 / col_ratio,  # 0 1 2 3 4
    #                 max_nnz_per_row, min_nnz_per_row, mean_nnz_per_row, std_nnz_per_row, # 5, 6, 7, 8
    #                 max_nnz_per_col, min_nnz_per_col, mean_nnz_per_col, std_nnz_per_col] # 9,10,11,12
    
    #    0     1     2     3
    #  rows, cols, nnz, sparsity, 
    #    4              5           6            7
    # avr_nnz_row, min_nnz_row, max_nnz_row, var_nnz_row,
    #    8              9           10          11 
    # avr_nnz_col, min_nnz_col, max_nnz_col, var_nnz_col, 
    
    # manual_feature_all.append(manual_feature)
    # nnz_number_all.append(nnz_number)

    # whole dense matrix
    # dense_matrix = sparse_matrix.toarray()
    # dense_matrix[dense_matrix != 0] = 1 # set value for position
    # dense_matrix = np.array(dense_matrix, dtype=np.int8)
    # print(dense_matrix.shape)
    
    
    row_point = sparse_matrix.row[:]
    col_point = sparse_matrix.col[:]
    
    point_cloud = list(zip(row_point, col_point))[:]
    point_cloud = np.array(point_cloud)
    np.random.shuffle(point_cloud)

    scene_size = np.array([row_num, col_num])
    grid_size = np.array([density_size, density_size])
    voxel_size = scene_size / grid_size
    
    voxel_index = np.floor(point_cloud / voxel_size).astype(int)
    point_cloud = point_cloud / scene_size
    coordinate_buffer = np.unique(voxel_index, axis = 0)
    
    max_point_number = 45
    K = len(coordinate_buffer)
    T = max_point_number

    # [K, 1] store number of points in each voxel grid
    number_buffer = np.zeros(shape = (K), dtype = np.int64)

    # [K, T, 4] feature buffer as described in the paper
    feature_buffer = np.zeros(shape = (K, T, 4), dtype = np.float32)
    
    # build a reverse index for coordinate buffer
    index_buffer = {}
    for i in range(K):
        index_buffer[tuple(coordinate_buffer[i])] = i

    for voxel, point in zip(voxel_index, point_cloud):
        index = index_buffer[tuple(voxel)]
        number = number_buffer[index]
        if number < T:
            feature_buffer[index, number, :2] = point
            number_buffer[index] += 1

    feature_buffer[:, :, -2:] = feature_buffer[:, :, :2] - \
        feature_buffer[:, :, :2].sum(axis = 1, keepdims = True) / number_buffer.reshape(K, 1, 1)

    # voxel_dict = {'feature_buffer': feature_buffer,
    #                 'coordinate_buffer': coordinate_buffer,
    #                 'number_buffer': number_buffer}
    
    density_buffer = np.zeros(shape = (K, 2), dtype = np.float32)
    for i in range(K):
        # index_buffer[tuple(coordinate_buffer[i])] = i
        row_, col_ = coordinate_buffer[i][0], coordinate_buffer[i][1]
        density_buffer[i, 0] = density_local_map[row_, col_]
        density_buffer[i, 1] = density_map[row_, col_]
    
    print(filename_out, 'over')
    
    with h5py.File(filename_out, 'w') as f:
        f.create_dataset('feature_buffer',    data=feature_buffer)
        f.create_dataset('coordinate_buffer', data=coordinate_buffer)
        f.create_dataset('number_buffer',     data=number_buffer)
        f.create_dataset('density_buffer',     data=density_buffer)
        f.create_dataset('nnz_number',        data=nnz_number)
        
'''

# Root directory holding one sub-directory per sparse matrix.
path_directory = '../data/matrix_suite_tar/'

# Matrices whose per-matrix voxel files ("<name>_voxel.h5py") are merged
# into a single combined dataset below.
sparse_name_list = [
    'gen2', '176bit', 'bcsstk13', 'biplane-9', 't2d_q9', 'fv2', 'fv3',
    'big_dual', 'ch7-8-b3', 'luxembourg_osm', 'cond-mat-2003', 'astro-ph',
    'wing', 'TSOPF_RS_b39_c7', 'airfoil_2d', 'jan99jac120sc',
    '3D_28984_Tetra', 'rajat25', 'c-69', 'rajat16', 'Trec13',
    'Zd_Jac2_db', 'bibd_17_8', 'image_interp', 'net4-1', 'tsyl201',
    'matrix-new_3', 'ct20stif', 'stat96v2', 'belgium_osm', 'Chevron3',
    'parabolic_fem',
]

# Per-matrix arrays are collected here and concatenated after the loop.
feature_buffer_all = []
coordinate_buffer_all = []
number_buffer_all = []
density_buffer_all = []
nnz_number_all = []

# Running row offset of each matrix inside the concatenated buffers:
# csr_length_list[i] is where matrix i starts, csr_length_list[i+1] where it ends.
csr_length = 0
csr_length_list = [0]

for sparse_name in sparse_name_list:

    print(sparse_name)
    # Per-matrix voxelized data produced by the (disabled) preprocessing pass above.
    filename_h5py = path_directory + f'{sparse_name}/' + sparse_name + "_voxel.h5py"

    print('#loading dataset', filename_h5py, '...')
    # Context manager guarantees the HDF5 handle is closed even if a dataset
    # is missing or a read raises (the original `cache.close()` was skipped
    # on any exception, leaking the file handle).
    with h5py.File(filename_h5py, 'r') as cache:
        # [()] materializes each dataset as an in-memory numpy array.
        feature_buffer = cache['feature_buffer'][()]        # per-voxel point features; (K, T, 4) per generator above — TODO confirm
        coordinate_buffer = cache['coordinate_buffer'][()]  # voxel grid coordinates
        number_buffer = cache['number_buffer'][()]          # points stored per voxel
        density_buffer = cache['density_buffer'][()]        # local/global density per voxel
        nnz_number = cache['nnz_number'][()]                # nonzero count of the matrix

    # Record where this matrix's voxel rows start/end in the merged buffers.
    csr_length += len(feature_buffer)
    csr_length_list.append(csr_length)

    feature_buffer_all.append(feature_buffer)
    coordinate_buffer_all.append(coordinate_buffer)
    number_buffer_all.append(number_buffer)
    density_buffer_all.append(density_buffer)
    nnz_number_all.append(nnz_number)

# Merge the per-matrix arrays into single contiguous buffers.
csr_length_list = np.array(csr_length_list)
nnz_number_all = np.array(nnz_number_all)

feature_buffer_all = np.concatenate(feature_buffer_all, axis=0)
coordinate_buffer_all = np.concatenate(coordinate_buffer_all, axis=0)
number_buffer_all = np.concatenate(number_buffer_all, axis=0)
density_buffer_all = np.concatenate(density_buffer_all, axis=0)

filename_out = '../data/32_voxel.h5py'

# Dataset name -> array to persist in the combined file (insertion order
# matches the original create_dataset sequence).
output_datasets = {
    'feature_buffer': feature_buffer_all,
    'coordinate_buffer': coordinate_buffer_all,
    'number_buffer': number_buffer_all,
    'density_buffer': density_buffer_all,
    'nnz_number': nnz_number_all,
    'csr_store_length': csr_length_list,
}

with h5py.File(filename_out, 'w') as f:
    for dataset_name, array in output_datasets.items():
        f.create_dataset(dataset_name, data=array)
