# Standard library
import json
import os
import time

# Third-party
import numba as nb
import numpy as np
import open3d as o3d
import torch
from func_timeout import func_set_timeout, FunctionTimedOut
# from CGAL.CGAL_Kernel import *
'''
OPEN3D MANUAL:
https://www.open3d.org/docs/release/tutorial/
'''

'''
PARAMETER SETTING
'''
# Output directory for voxel-downsampled point clouds (.pcd).
DOWNPCD_PATH = './reconstruction/downpcd/'
# PLY_PATH = './reconstruction/ply/'
# Output directory for reconstructed meshes (.ply).
PLY_PATH = './reconstruction/yebo/ply/'
# PLY_PATH = './reconstruction/yebo/test/'
MAX_FILE_SIZE = 30 * 1024 * 1024  # 30 MiB file-size threshold
MAX_POINTS_SIZE = 1000000  # point-count threshold before down sampling

def time_out(fn):
    """Decorator: rerun `fn` on a voxel-downsampled cloud if it times out.

    On the fallback path `fn` is re-invoked positionally as
    fn(downpcd, args[1], args[2]) — i.e. the decorated function is assumed
    to take (pcd, mode, filename).

    NOTE(review): the fallback relies on down_sample_voxel(mode='s')
    returning the downsampled cloud — confirm against its implementation.
    """
    voxel_size = 0.02  # ONLY for down sampling (fallback retry)

    def wrapper(*args, **kwargs):
        try:
            # BUG FIX: the original discarded fn's result, so every
            # decorated call returned None on the success path.
            return fn(*args, **kwargs)
        except FunctionTimedOut:
            # Retry on a coarser cloud; mode 's' also writes it to disk.
            downpcd = down_sample_voxel(args[0], voxel_size, 's', args[2])
            result_timeout = fn(downpcd, args[1], args[2])
            print(f'[Info] TimeOut! \n [Info] Down Sampling Points: {np.asarray(downpcd.points).size / 3}')
            return result_timeout

    return wrapper

@nb.jit(nopython=True)
def compute_vn_kernel(vertices, faces):
    """Accumulate per-vertex normals as a running average of incident face normals.

    Args:
        vertices: (nv, 3) float array of vertex positions.
        faces: (nf, 3) integer array of triangle vertex indices.
    Returns:
        (nv, 3) array of averaged unit face normals per vertex.
        NOTE(review): an average of unit vectors is generally NOT unit
        length itself — callers wanting unit normals must re-normalize.
    """
    # Incremental mean: weight 1/(count+1) folds each new face normal into
    # the running average without needing a second pass over the faces.
    lerp = lambda old_val, new_val, new_weight: new_weight * new_val + (1 - new_weight) * old_val
    normals = np.zeros_like(vertices)
    # counts mirrors the (nv, 3) shape so 1/(counts[f]+1) broadcasts per
    # component; all three components of a row always hold the same count.
    counts = np.zeros_like(vertices)
    for f in faces:
        p1 = vertices[f[0]]
        p2 = vertices[f[1]]
        p3 = vertices[f[2]]
        # Face normal from the cross product of two edge vectors.
        normal = np.cross(p2 - p1, p3 - p1)
        # NOTE(review): a degenerate (zero-area) face divides by zero here.
        normal = normal / np.linalg.norm(normal)
        normals[f[0]] = lerp(normals[f[0]], normal, 1/(counts[f[0]] + 1))
        normals[f[1]] = lerp(normals[f[1]], normal, 1/(counts[f[1]] + 1))
        normals[f[2]] = lerp(normals[f[2]], normal, 1/(counts[f[2]] + 1))
        counts[f[0]] += 1
        counts[f[1]] += 1
        counts[f[2]] += 1
    return normals

def compute_vn(mesh):
    """ compute vertex normals
        NOTE: for trimesh object, we recommend using this function to compute vertex normals
    
    Args:
        mesh (trimesh.Trimesh): the input mesh
    Returns:
        vertex_normals (np.ndarray): per-vertex average of incident unit face
            normals, shape==(nv, 3). NOTE(review): these averages are not
            guaranteed to be unit length; re-normalize if needed.
    """
    return compute_vn_kernel(mesh.vertices, mesh.faces)

def calculate_normals(pcd):
    """Estimate and consistently orient normals on `pcd` (in place).

    Returns:
        The same point cloud, now carrying normals.
    """
    search = o3d.geometry.KDTreeSearchParamHybrid(0.1, 30)
    pcd.estimate_normals(search_param=search)
    # Re-orient over a 100-neighbour tangent-plane graph so adjacent
    # normals point the same way (prevents disoriented normals).
    pcd.orient_normals_consistent_tangent_plane(100)
    return pcd

def down_sample_voxel(pcd, voxel_size, mode, filename):
    """Voxel-downsample a point cloud, optionally adding normals and/or saving.

    Args:
        pcd: open3d.geometry.PointCloud to downsample.
        voxel_size (float): voxel edge length for down sampling.
        mode (str): 'n'  -> estimate normals, return the cloud;
                    's'  -> save to DOWNPCD_PATH, return the cloud;
                    'ns' -> estimate normals, save, return the cloud;
                    anything else -> plain downsample, return the cloud.
        filename (str): base name used when saving.

    Returns:
        The downsampled point cloud (with normals for 'n'/'ns').
    """
    downpcd = pcd.voxel_down_sample(voxel_size)

    if mode == 'n':
        return calculate_normals(downpcd)
    if mode == 's':
        o3d.io.write_point_cloud(DOWNPCD_PATH + filename + '_down.pcd', downpcd)
        # BUG FIX: the original fell through to an unreachable-looking
        # `return -1` here, so callers expecting a point cloud back
        # (e.g. the time_out fallback) received -1 instead.
        return downpcd
    if mode == 'ns':
        downpcd_normal = calculate_normals(downpcd)
        o3d.io.write_point_cloud(DOWNPCD_PATH + filename + '_down_normal.pcd', downpcd_normal)
        return downpcd_normal
    return downpcd

def get_radii(pcd):
    """Derive three ball-pivoting radii from nearest-neighbour spacing.

    Radii are mean + (2 + i) * (mean - min) for i in 0..2, each rounded
    to 4 decimal places.

    Args:
        pcd: point cloud exposing compute_nearest_neighbor_distance().

    Returns:
        list of three ascending radii.
    """
    distance = pcd.compute_nearest_neighbor_distance()
    # Renamed from min/mean locals that shadowed builtins; the unused
    # max statistic was dropped.
    d_min = np.min(distance)
    d_mean = np.mean(distance)
    delta1 = d_mean - d_min
    rate = 2
    radii = [round(d_mean + (rate + i) * delta1, 4) for i in range(3)]
    for r in radii:
        print(f'[Info] Radii: {r}')
    return radii

# Ball pivoting
def ball_pivoting(pcd, radii, filename=''):
    """Ball-pivoting surface reconstruction; writes the mesh under PLY_PATH.

    Args:
        pcd: input point cloud.
        radii: ball radii to pivot with, smallest first.
        filename: input file name; its stem is reused for the output .ply.

    Returns:
        (rec_mesh, PLY_OUT_PATH): reconstructed mesh and where it was saved.
    """
    # The algorithm needs oriented normals on the input cloud.
    normalpcd = calculate_normals(pcd)
    rec_mesh = o3d.geometry.TriangleMesh.create_from_point_cloud_ball_pivoting(
        normalpcd, o3d.utility.DoubleVector(radii))
    # Encode the radii in the output name, e.g. '0.01_0.02_0.04'.
    radius_str = '_'.join(str(r) for r in radii)
    base = filename.split('.')[0]
    PLY_OUT_PATH = PLY_PATH + base + '_ball_' + radius_str + '.ply'
    o3d.io.write_triangle_mesh(PLY_OUT_PATH, rec_mesh)
    return rec_mesh, PLY_OUT_PATH

def alpha_shapes(pcd, alpha, filename=''):
    """Alpha-shape surface reconstruction; writes the mesh under PLY_PATH.

    Args:
        pcd: input point cloud.
        alpha: alpha-shape parameter (smaller -> tighter surface).
        filename: input file name; its stem is reused for the output .ply.

    Returns:
        (alpha_mesh, PLY_OUT_PATH): reconstructed mesh and its saved path.
    """
    print(f"[Info] alpha = {alpha:.3f}")
    normalpcd = calculate_normals(pcd)
    alpha_mesh = o3d.geometry.TriangleMesh.create_from_point_cloud_alpha_shape(
        normalpcd, alpha)
    out_path = PLY_PATH + filename.split('.')[0] + '_alpha_' + str(alpha) + '.ply'
    o3d.io.write_triangle_mesh(out_path, alpha_mesh)
    return alpha_mesh, out_path

def possion(pcd, depth, filename=''):
    """Poisson surface reconstruction; writes the mesh under PLY_PATH.

    (Function name keeps the file's historical 'possion' spelling so
    existing callers keep working.)

    Args:
        pcd: input point cloud.
        depth: octree depth for the Poisson solver.
        filename: input file name; its stem is reused for the output .ply.

    Returns:
        (possion_mesh, PLY_OUT_PATH): trimmed mesh and its saved path.
    """
    normalpcd = calculate_normals(pcd)
    possion_mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson(
        normalpcd, depth)
    # Trim the sparsest 10% of vertices (low reconstruction density).
    low_density = densities < np.quantile(densities, 0.10)
    possion_mesh.remove_vertices_by_mask(low_density)
    out_path = PLY_PATH + filename.split('.')[0] + '_possion_' + str(depth) + '.ply'
    o3d.io.write_triangle_mesh(out_path, possion_mesh)
    return possion_mesh, out_path

def generate_labels(index, original_labels):
    '''
    Convert per-vertex segment indices into (id, label) pairs plus a color table.

    Args:
        index (torch.Tensor): per-vertex segment index; values 0-3 map to
            original_labels, anything else becomes 'others'.
        original_labels (sequence of str): label names for indices 0..3.

    Returns:
        labels: [[id, label], ...] aligned with `index`.
        labels_unique: the same mapping over torch.unique(index) (sorted,
            de-duplicated).
        color_table_tensor (torch.Tensor): RGB rows aligned with `labels`.
    '''
    # Shared index -> RGB palette; replaces two duplicated if/elif chains.
    palette = {
        0: [193, 255, 193],  # green
        1: [151, 255, 255],  # blue
        2: [255, 222, 173],  # yellow
        3: [255, 105, 180],  # pink
    }
    gray = [139, 136, 120]  # fallback for unknown indices

    def _label_for(i):
        # Known indices keep their original label; everything else is 'others'.
        k = int(i)
        if k in palette:
            return [k, original_labels[k]]
        return [100, 'others']

    labels = []
    color_table = []
    for i in index:
        labels.append(_label_for(i))
        color_table.append(palette.get(int(i), gray))
    color_table_tensor = torch.Tensor(color_table)

    labels_unique = [_label_for(i) for i in torch.unique(index)]
    return labels, labels_unique, color_table_tensor

def write_to_json_seg(segfile, scanId, kthr, segMinVerts, index):
    """Write per-vertex segment indices to a ScanNet-style .seg.json file.

    Args:
        segfile (str): output directory/prefix (trailing separator expected).
        scanId (str): scene identifier, used in the filename and payload.
        kthr: kThresh segmentation parameter, stored in the header.
        segMinVerts: segMinVerts segmentation parameter, stored in the header.
        index (torch.Tensor): 1-D tensor of per-vertex segment indices.
    """
    filename = segfile + scanId + '_' + str(kthr) + '_' + str(segMinVerts) + '.seg.json'
    # NOTE: indices are written WITHOUT de-duplication — the C++ segmentator
    # uses an unordered_set, but here every vertex keeps its own entry.
    num = index.numel()
    payload = {
        'params': {'kThresh': kthr, 'segMinVerts': segMinVerts},
        'sceneId': scanId,
        'segIndices': [index[i].item() for i in range(num)],
    }
    # json.dump replaces the hand-built string of the original: same compact
    # layout, correct escaping, and a single managed file handle instead of
    # two opens with redundant close() calls inside `with` blocks.
    with open(filename, 'w') as f:
        json.dump(payload, f, separators=(',', ':'))

def write_to_json_agg(aggfile, scanId, labels_unique):
    """Write an aggregation .json that groups segment ids by shared label.

    Args:
        aggfile (str): output directory/prefix (trailing separator expected).
        scanId (str): scene identifier, used in the filename.
        labels_unique (list): [[segment_id, label], ...] pairs.
    """
    filename = aggfile + scanId + 'aggregation.json'

    # NOTE merge the instance LABEL: segment ids sharing a label are
    # collected into one group, in first-seen label order (same ordering
    # as the original nested-loop merge).
    groups = {}
    order = []
    for seg_id, label in labels_unique:
        if label not in groups:
            groups[label] = []
            order.append(label)
        groups[label].append(seg_id)

    payload = {'segGroups': [{'segments': groups[label], 'label': label}
                             for label in order]}
    # json.dump replaces the hand-built string: same compact layout, but
    # labels are properly escaped and only one file handle is used.
    with open(filename, 'w') as f:
        json.dump(payload, f, separators=(',', ':'))

def load_align(alignfile):
    """Load the scene axis-alignment matrix from a meta file.

    Args:
        alignfile (str): path to a meta text file containing a line like
            'axisAlignment = m00 m01 ... m33' (16 floats, row-major).

    Returns:
        np.ndarray: 4x4 alignment matrix; identity when the file has no
        axisAlignment entry (test-set scenes don't have one).
    """
    # BUG FIX: the original opened the file without closing it and —
    # critically — never returned the matrix it computed.
    with open(alignfile) as f:
        lines = f.readlines()
    axis_align_matrix = np.eye(4)
    for line in lines:
        if 'axisAlignment' in line:
            # BUG FIX: str.strip('axisAlignment = ') strips a *character
            # set*, not the prefix; split on '=' and whitespace instead.
            values = line.rstrip().split('=', 1)[1].split()
            axis_align_matrix = np.array([float(x) for x in values]).reshape((4, 4))
            break
    return axis_align_matrix

def remove_points(points, bound):
    """Return the indices (np.where tuple) of points inside the box `bound`.

    Args:
        points (np.ndarray): (n, >=3) array; only columns x, y, z are tested.
        bound (dict): axis-aligned box with keys minX/maxX/minY/maxY/minZ/maxZ.

    Returns:
        tuple of np.ndarray: indices of in-bound points, as from np.where.
    """
    lower = np.array([bound['minX'], bound['minY'], bound['minZ']])
    upper = np.array([bound['maxX'], bound['maxY'], bound['maxZ']])
    # A point survives when all three coordinates fall inside the box
    # (inclusive on both ends, matching the original chained comparison).
    inside = np.all((points[:, :3] >= lower) & (points[:, :3] <= upper), axis=1)
    return np.where(inside)

def pointcloud_chunking(save_dir, file_path, voxel_size):
    """Split a point cloud into axis-aligned chunks of extent `voxel_size`.

    Args:
        save_dir (str): output directory (created if missing); one .pcd file
            is written per non-empty chunk, named 'ii-jj-kk.pcd'.
        file_path (str): input point cloud readable by open3d.
        voxel_size (sequence of 3 floats): chunk extent along x, y, z.

    Chunks with fewer than 3 points are skipped.
    """
    os.makedirs(save_dir, exist_ok=True)
    pcd = o3d.io.read_point_cloud(file_path)
    points = np.array(pcd.points)
    print(f'[Info] Original Points Number: {points.shape}')
    colors = np.array(pcd.colors)
    real_bound = [np.min(points, axis=0), np.max(points, axis=0)]
    print(f'[Info] Original PointCloud Bound: {real_bound}')
    # Pad the bound slightly so boundary points fall strictly inside a chunk.
    elastic_bound = [np.min(points, axis=0) - 0.01, np.max(points, axis=0) + 0.01]
    print(f'[Info] Elastic Bound: {elastic_bound}')
    bound_lenth = elastic_bound[1] - elastic_bound[0] + 0.2
    print(f'[Info] Bound Length: {bound_lenth}')
    voxel_number = [bound_lenth[i] / voxel_size[i] for i in range(3)]
    print(f'[Info] Voxel Number: {voxel_number}')
    voxel_number = np.ceil(voxel_number).astype(np.int32)
    print(f'[Info] Voxel Number: {voxel_number}')
    bound = {'minX': -10, 'maxX': 10, 'minY': -10, 'maxY': 10, 'minZ': -10, 'maxZ': 10}
    n = 1
    N = voxel_number.prod()
    validate_points = 0
    # CHUNKING: walk the grid and crop out each cell.
    for i in range(voxel_number[0]):
        bound['minX'] = elastic_bound[0][0] + voxel_size[0] * i
        bound['maxX'] = min(elastic_bound[0][0] + voxel_size[0] * (i + 1), elastic_bound[1][0])
        for j in range(voxel_number[1]):
            bound['minY'] = elastic_bound[0][1] + voxel_size[1] * j
            bound['maxY'] = min(elastic_bound[0][1] + voxel_size[1] * (j + 1), elastic_bound[1][1])
            for k in range(voxel_number[2]):
                # BUG FIX: minZ originally stepped by voxel_size[0] (the x
                # extent) instead of voxel_size[2], skewing every z slice
                # whenever the chunk size is anisotropic. maxZ below already
                # used voxel_size[2].
                bound['minZ'] = elastic_bound[0][2] + voxel_size[2] * k
                bound['maxZ'] = min(elastic_bound[0][2] + voxel_size[2] * (k + 1), elastic_bound[1][2])
                mask = remove_points(points, bound)
                sub_pcd = o3d.geometry.PointCloud()
                sub_pcd.points = o3d.utility.Vector3dVector(points[mask])
                sub_pcd.colors = o3d.utility.Vector3dVector(colors[mask])
                save_path = f'{i:02d}-{j:02d}-{k:02d}.pcd'
                sub_pcd_points_num = np.asarray(sub_pcd.points).size / 3
                print(f'[Info] --------Current / Total : {n} / {N}, File Name: {save_path}')
                if sub_pcd_points_num >= 3:
                    o3d.io.write_point_cloud(save_dir + save_path, sub_pcd, write_ascii=True)
                    print(f'[Info] Subpcd Points Number: {sub_pcd_points_num}')
                else:
                    print('[Info] !!!SKIP!!! Because PointCloud Number is NOT satisfied.')
                validate_points += sub_pcd_points_num
                n += 1
    print(f'[Info] Validate Points: {validate_points} / {points.size / 3}')

def calculate_density(file_path):
    """Load a point cloud and report its density.

    Density = point count / axis-aligned bounding-box volume.

    Args:
        file_path (str): point cloud file readable by open3d.

    Returns:
        float: the computed density.
    """
    cloud = o3d.io.read_point_cloud(file_path)
    points = np.array(cloud.points)
    points_num = (points.size / 3) * 1.0
    print(f'[Info] Original Points Number: {points_num}')
    bound = [np.min(points, axis=0), np.max(points, axis=0)]
    print(f'[Info] Bound: {bound}')
    bound_length = bound[1] - bound[0]
    print(f'[Info] Bound Length: {bound_length}')
    # Volume of the axis-aligned bounding box.
    volume = 1
    for side in bound_length:
        volume *= side
    print(f'[Info] Volume: {volume}')
    distance = cloud.compute_nearest_neighbor_distance()
    print(f'[Info] Nearest Neighbor Distance: {np.mean(distance)}')
    density = points_num / volume
    print(f'[Info] Density: {density}')

    return density

def judge_file_size(file_path, SIZE):
    """Return True when the file at `file_path` is at least SIZE bytes."""
    return os.stat(file_path).st_size >= SIZE
    
def judge_points_size(pts, SIZE):
    """Return True when the point count `pts` reaches the threshold SIZE."""
    return pts >= SIZE

def reconstruction_method(pcd, mode, filename):
    """Run one reconstruction algorithm on `pcd` and report elapsed time.

    Args:
        pcd: input point cloud.
        mode (str): 'b' ball pivoting, 'p' Poisson (depth 9),
            'a' alpha shapes (alpha 0.03); anything else is a no-op.
        filename (str): forwarded to the reconstruction routine for naming.

    Returns:
        int: always 1.
    """
    # Dispatch table; lambdas are lazy, so radii for 'b' are only computed
    # when that mode is actually selected (and after the timer starts).
    dispatch = {
        'b': lambda: ball_pivoting(pcd, get_radii(pcd), filename),
        'p': lambda: possion(pcd, 9, filename),       # ONLY for possion
        'a': lambda: alpha_shapes(pcd, 0.03, filename),  # ONLY for alpha shape
    }
    start_time = time.time()
    action = dispatch.get(mode)
    if action is not None:
        action()
    elapsed = time.time() - start_time
    print('Time consuming: {:.2f} seconds'.format(elapsed))
    return 1
    

def chunking_files_processing(path_, mode):
    """Reconstruct every .pcd file in `path_` with the chosen method.

    Args:
        path_ (str): directory containing chunked .pcd files.
        mode (str): reconstruction mode forwarded to reconstruction_method
            ('b' ball pivoting, 'p' poisson, 'a' alpha shapes).
    """
    files = os.listdir(path_)
    totalnum = len(files)
    cnt = 0

    for filename in files:
        if filename.endswith(".pcd"):
            file_path = os.path.join(path_, filename)
            pcd = o3d.io.read_point_cloud(file_path)
            cnt += 1
            # BUG FIX: the progress line printed a literal placeholder
            # instead of the current file name.
            print(f'--------Current File---------: {filename}, Total: {cnt} / {totalnum}')
            original_points = np.asarray(pcd.points).size / 3
            print(f'[Info] Original Points: {original_points}')
            # (Dead commented-out down-sampling branch and its unused
            # voxel_size local were removed.)
            reconstruction_method(pcd, mode, filename)