import open3d as o3d
import numpy as np
import torch
from myutils import *


def off2obj(inF, outF, calculateNormal=True):
    '''
    Convert an .off mesh file to an .obj mesh file.
    --
    inF :    input file path (.off)
    outF :   output file path (.obj)
    calculateNormal : if True, compute per-vertex normals before writing
    ---
    Example
    ---
    ```
    # read all .off and parse to .obj
    import glob
    import os.path
    absp = os.path.abspath('../client/bin/off/')
    infs = glob.glob(rf'{absp}/*.off')                  # input files
    outfs = [f.replace('.off', '.obj') for f in infs]   # output files
    for i, _ in enumerate(infs):
        off2obj(infs[i], outfs[i])
    ```
    '''
    mesh = o3d.io.read_triangle_mesh(inF)
    if calculateNormal:
        mesh.compute_vertex_normals()
    o3d.io.write_triangle_mesh(outF, mesh)

def obj2off(inF, outF):
    '''
    Convert an .obj mesh file to an .off mesh file by reading the mesh
    with Open3D and writing it back out to the target path.
    --
    inF :  input file path
    outF : output file path
    '''
    loaded = o3d.io.read_triangle_mesh(inF)
    o3d.io.write_triangle_mesh(outF, loaded)

def obj2txt(inF, outF):
    '''
    Dump a mesh to a text file, one 'x,y,z,nx,ny,nz' line per vertex.
    Vertices are rescaled via normalization(); normals are written as-is.
    --
    inF :  input mesh file path
    outF : output text file path
    '''
    mesh = o3d.io.read_triangle_mesh(inF)
    verts = normalization(np.asarray(mesh.vertices))
    normals = np.asarray(mesh.vertex_normals)
    with open(outF, 'w') as out:
        rows = [
            f'{v[0]},{v[1]},{v[2]},{n[0]},{n[1]},{n[2]}\n'
            for v, n in zip(verts, normals)
        ]
        out.writelines(rows)
        out.flush()

def normalization(data):
    '''
    Scale an array so its maximum absolute value becomes 1.
    --
    data : np.ndarray, any numeric array
    returns : data / max(|data|); the input is returned unchanged when the
              maximum absolute value is 0 (all-zero or empty array), which
              would otherwise divide by zero / raise on an empty max().
    '''
    _range = np.max(np.abs(data)) if data.size else 0.0
    # Guard: an all-zero array has no scale; dividing would yield NaN/inf.
    if _range == 0:
        return data
    return data / _range

def obj2point_list(inF):
    '''
    Read a mesh and flatten it into a single Python list laid out as
    [x, y, z, nx, ny, nz, x, y, z, nx, ny, nz, ...] per vertex.
    Both vertices and normals are rescaled via normalization().
    --
    inF : input mesh file path
    '''
    mesh = o3d.io.read_triangle_mesh(inF)
    verts = normalization(np.asarray(mesh.vertices))
    normals = normalization(np.asarray(mesh.vertex_normals))
    flat = []
    for v, n in zip(verts, normals):
        flat.extend(v)
        flat.extend(n)
    return flat


def farthest_point_sample_torch(xyz, npoint):

    """
    Iterative farthest point sampling (FPS).

    Input:
        xyz: pointcloud data, [B, N, 3]; lower-rank tensors ([N, 3] or [3])
             are unsqueezed on the left until 3-D
        npoint: number of samples to draw
    Return:
        centroids: sampled pointcloud indices as a nested list, [B, npoint]
    """
    if torch.cuda.is_available():
        xyz = xyz.to("cuda")

    # Promote unbatched input to [B, N, 3].
    while xyz.dim() < 3:
        xyz = xyz.unsqueeze(0)

    device = xyz.device
    B, N, C = xyz.shape

    # Sampled index matrix (B, npoint). Indices are kept as torch.long —
    # the original stored them in a float tensor, which is inexact for
    # N > 2**24 and forced casts when indexing.
    centroids = torch.zeros(B, npoint, dtype=torch.long, device=device)
    # Running minimum squared distance from each point to the sample set (B, N).
    distance = torch.full((B, N), 1e10, dtype=torch.float, device=device)

    batch_indices = torch.arange(B, dtype=torch.long, device=device)

    # Deterministic seed: start from the point farthest from the barycenter
    # (instead of a random point) so repeated calls agree.
    barycenter = torch.mean(xyz, dim=1, keepdim=True)                 # [B, 1, 3]
    dist = torch.sum((xyz - barycenter) ** 2, -1, dtype=torch.float)  # [B, N]
    farthest = torch.argmax(dist, dim=1)

    for i in range(npoint):
        centroids[:, i] = farthest                               # record the i-th sample
        # Coordinates of the newest sample, [B, 1, 3].
        centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
        # Squared Euclidean distance from every point to the newest sample.
        dist = torch.sum((xyz - centroid) ** 2, -1, dtype=torch.float)
        # Keep the per-point minimum over all samples chosen so far.
        mask = dist < distance
        distance[mask] = dist[mask]
        # Next sample: the point farthest from the current sample set.
        farthest = torch.argmax(distance, dim=-1)

    return centroids.cpu().int().tolist()


def farthest_point_sample(vert: np.ndarray, norm: np.ndarray, npoint, seed=0):
    '''
    Down-sample paired vertex/normal arrays with farthest point sampling.
    --
    vert :   vertex coordinates, [N, 3]
    norm :   per-vertex normals, [N, 3]; indexed with the same sample ids
             so the pairing with vert is preserved
    npoint : number of points to keep
    seed :   unused; kept for backward compatibility with callers of the
             old NumPy implementation (which used it for the random start)
    returns : (sampled vertices, matching normals)
    '''
    # The torch FPS returns [B, npoint] indices; input here is unbatched,
    # so only batch 0 is used.
    centroids = farthest_point_sample_torch(torch.tensor(vert), npoint)
    vert = vert[centroids[0], :]
    norm = norm[centroids[0], :]
    return vert, norm


def mesh2pcd(inMeshFile, nsamples=2048, doNormalize=True):
    '''
    Load a mesh, loop-subdivide until it has at least `nsamples` vertices,
    then farthest-point-sample down to exactly `nsamples` points.
    param
    inMeshFile: mesh filepath
    nsamples: num of fp sampling
    doNormalize: rescale sampled points so max |coord| == 1
    return
    pcd: point cloud [o3d.geometry.PointCloud()]
    raises ValueError when the mesh fails to load or subdivision stalls
    '''
    mesh = o3d.io.read_triangle_mesh(filename=inMeshFile, print_progress=True)
    if len(mesh.vertices) < 3:
        raise ValueError(f'Open3D Failed to Load Mesh "{inMeshFile}" (Vertex Num < 3).')
    if not mesh.has_vertex_normals():
        mesh.compute_vertex_normals()
    loop_lim = 16
    loop_count = 1
    # Up-sample by loop subdivision until enough vertices exist to draw
    # nsamples distinct points; bail out after loop_lim rounds.
    while len(mesh.vertices) < nsamples:
        if loop_count >= loop_lim:
            raise ValueError(f'Open3D Failed to Loop-Subdivide Mesh "{inMeshFile}" (Reach Limit Times {loop_lim}).')
        logger.info(f'mesh: nVerts = {len(mesh.vertices)}, run the {loop_count} times loop subdividing...')
        mesh = mesh.subdivide_loop(1)
        loop_count += 1
    logger.info(f'mesh: nVerts = {len(mesh.vertices)} > {nsamples}, done subdividing...')
    pts = normalization(np.asarray(mesh.vertices))
    nrm = normalization(np.asarray(mesh.vertex_normals))
    # print(f'nPoints={len(pts)}, run fp sampling...')
    pts, nrm = farthest_point_sample(pts, nrm, nsamples)
    if doNormalize:
        pts /= np.max(abs(pts))
    cloud = o3d.geometry.PointCloud()
    cloud.points = o3d.utility.Vector3dVector(pts)
    cloud.normals = o3d.utility.Vector3dVector(nrm)
    return cloud


def pcd2plist(pcd):
    '''
    Flatten a point cloud into one Python list laid out as
    [x, y, z, nx, ny, nz, ...] per point.
    --
    pcd : object exposing .points and .normals array-likes of shape [N, 3]
    '''
    pts = np.array(pcd.points, dtype=np.float64)
    nrm = np.array(pcd.normals, dtype=np.float64)
    return [
        value
        for point, normal in zip(pts, nrm)
        for value in (*point[:3], *normal[:3])
    ]


def pcd2txt(outFileName, pcd):
    '''
    Write a point cloud as whitespace-separated text rows: x y z nx ny nz.
    --
    outFileName : output file path
    pcd : object exposing .points and .normals array-likes of shape [N, 3]
    '''
    rows = np.concatenate((np.array(pcd.points), np.array(pcd.normals)), axis=1)
    np.savetxt(outFileName, rows)

# import glob
# import os.path
# absp = os.path.abspath('../client/bin/model/')
# infs = glob.glob(rf'{absp}/*.obj')
# from concurrent.futures import ThreadPoolExecutor, wait
# import multiprocessing
# import time
# start = time.perf_counter()
# with ThreadPoolExecutor(max_workers=multiprocessing.cpu_count() - 2) as tp:
#     fts = [tp.submit(mesh2pcd, f) for f in infs]
#     wait(fts)
# print(f'{time.perf_counter()-start:.2f}')

# pcd = mesh2pcd("cache/Qma3crJ2cwrESHuXN6FiHySL3voCxy718y2sjg8rwv4qnc.obj")
# pl=pcd2plist(pcd)
# print(pl)
# o3d.visualization.draw_geometries([pcd])


# # txt_path = '../client/bin/off/airplane_0627.txt'
# txt_path = './cache/airplane_0627.txt'
# # read the txt point cloud with numpy
# pcd = np.genfromtxt(txt_path, delimiter=",")
# pcd_vector = o3d.geometry.PointCloud()
# # load the point coordinates
# pcd_vector.points = o3d.utility.Vector3dVector(pcd[:, :3])
# pcd_vector.normals = o3d.utility.Vector3dVector(pcd[:, 3:6])
# o3d.visualization.draw_geometries([pcd_vector])
