import numpy as np
import open3d as o3d
import os
from sklearn.neighbors import NearestNeighbors
import sys
import torch
from tqdm import trange
import trimesh

sys.path.append("../..")

from data_processing.smplx import smplx
from data_processing.utils.time_align import txt_to_paried_frameids
from data_processing.utils.VTS_object import get_obj_info

def read_SMPLX_params(data_dir, M, N, device):
    """Load per-frame SMPL-X fitting parameters from chunked ``.npz`` files.

    Fitting results are stored as ``{i}to{j}.npz`` files, each covering up to
    ``M`` consecutive frames (the last chunk may be shorter). Each file holds
    a pickled dict of batched torch tensors under the ``results`` key.

    Args:
        data_dir: directory containing the chunked ``.npz`` result files.
        M: chunk size (number of frames per file).
        N: total number of frames to load.
        device: torch device the returned tensors are moved to.

    Returns:
        A list of ``N`` dicts, one per frame, mapping each SMPL-X parameter
        name to a tensor with a leading batch dimension of 1 on ``device``.
    """
    param_keys = ("betas", "body_pose", "transl", "global_orient",
                  "left_hand_pose", "right_hand_pose")
    SMPLX_params = []
    for i in range(0, N, M):
        top_i = min(i + M, N)
        result_path = os.path.join(data_dir, f"{i}to{top_i - 1}.npz")
        result = np.load(result_path, allow_pickle=True)["results"].item()
        for j in range(top_i - i):
            # Slice with j:j+1 (not plain j) to keep the batch dimension,
            # matching what the SMPL-X forward pass expects.
            SMPLX_params.append({k: result[k][j:j + 1].to(device) for k in param_keys})
    return SMPLX_params

def compute_valid(source_vertices, target_vertices, threshould=0.5):
    """Flag source points that lie close to a set of target points.

    For every source vertex, find the nearest target vertex (1-NN) and mark
    the source vertex valid when that distance is below ``threshould``.

    Args:
        source_vertices: (N, 3) array-like of query points.
        target_vertices: (M, 3) array-like of reference points.
        threshould: distance cutoff (misspelling kept: callers pass it by
            keyword, so renaming would break them).

    Returns:
        (N,) boolean numpy array — True where the nearest-neighbor distance
        is below the threshold.
    """
    knn = NearestNeighbors(n_neighbors=1).fit(target_vertices)
    dist, _ = knn.kneighbors(source_vertices)
    # dist has shape (N, 1); index the column explicitly so a single-point
    # query still yields a 1-D mask (squeeze() would collapse it to 0-D and
    # break boolean indexing downstream).
    valid = dist[:, 0] < threshould
    return valid

# --- configuration -----------------------------------------------------------
# Root of the HHO capture dataset; clips live under VTS/<date>/<clip>.
dataset_dir = "/share/datasets/hhodataset/"
# Directory holding the scanned object template meshes.
obj_dataset_dir = "/data3/datasets/HHO_object_dataset_final"
# Capture date (sub-directory of VTS) to process.
date = "20231020"
# Torch device used for the SMPL-X forward passes.
device = "cuda:1"

def _denoise_pc(pc_path, mesh_vertices, threshold=0.1):
    # Load a segmented point cloud and keep only the points lying within
    # `threshold` of the reference mesh vertices. Returns the filtered cloud
    # and the boolean validity mask.
    pc = o3d.io.read_point_cloud(pc_path)
    valid = compute_valid(pc.points, mesh_vertices, threshould=threshold)
    pc.points = o3d.utility.Vector3dVector(np.asarray(pc.points)[valid])
    pc.colors = o3d.utility.Vector3dVector(np.asarray(pc.colors)[valid])
    return pc, valid

def _posed_smplx_mesh(smplx_model, params, expression):
    # Run the SMPL-X forward pass for one frame's parameters and return the
    # posed body surface as a trimesh mesh in world coordinates.
    result = smplx_model(betas=params["betas"], expression=expression,
                         global_orient=params["global_orient"], transl=params["transl"],
                         body_pose=params["body_pose"],
                         left_hand_pose=params["left_hand_pose"],
                         right_hand_pose=params["right_hand_pose"],
                         return_verts=True)
    vertices = result.vertices.detach().cpu().numpy()[0]
    faces = result.faces.detach().cpu().numpy()
    return trimesh.Trimesh(vertices=vertices, faces=faces)

for clip in os.listdir(os.path.join(dataset_dir, "VTS", date)):
    # ---- per-clip setup: frame alignment, object model/poses, SMPL-X fits ----
    try:
        data_dir = os.path.join(dataset_dir, "VTS", date, clip)
        paired_frames = txt_to_paried_frameids(os.path.join(data_dir, "aligned_frame_ids.txt"))
        obj_name, obj_model_path = get_obj_info(data_dir, obj_dataset_dir)
        # per-frame object-to-world transforms, time-aligned with paired_frames
        obj2world = np.load(os.path.join(data_dir, "aligned_objposes.npy"))
        object_data = {
            "model_path": obj_model_path,
            "mesh": trimesh.load_mesh(obj_model_path),
            "obj2world": obj2world,
        }

        VTS_data = np.load(os.path.join(data_dir, "VTS_data.npz"), allow_pickle=True)["data"].item()
        person1_SMPLX_params_list, person2_SMPLX_params_list = None, None
        if "/joints" in VTS_data:
            # optimized SMPL-X parameters for person 1, stored in chunks of 50 frames
            person1_SMPLX_params_list = read_SMPLX_params(os.path.join(data_dir, "SMPLX_fitting", "person_1"), M=50, N=len(paired_frames), device=device)
        if "/joints2" in VTS_data:
            # optimized SMPL-X parameters for person 2
            person2_SMPLX_params_list = read_SMPLX_params(os.path.join(data_dir, "SMPLX_fitting", "person_2"), M=50, N=len(paired_frames), device=device)

        print(clip)
        os.makedirs(os.path.join(data_dir, "denoised_segmented_pc"), exist_ok=True)

        # The SMPL-X model and the (zero) expression are identical for every
        # frame of the clip, so build them once per clip instead of per frame.
        num_pca_comps = person1_SMPLX_params_list[0]["left_hand_pose"].shape[1]
        smplx_model = smplx.create("/share/human_model/models", model_type="smplx", gender="neutral",
                                   use_face_contour=False, num_betas=10, num_expression_coeffs=10,
                                   ext="npz", use_pca=True, num_pca_comps=num_pca_comps,
                                   flat_hand_mean=True)
        smplx_model.to(device)
        expression = torch.zeros([1, smplx_model.num_expression_coeffs], dtype=torch.float32).to(device)
    except Exception as e:
        # Clip-level inputs are missing or corrupt: report and skip the clip.
        # (Previously the script fell through into the frame loop with stale
        # or undefined state from the preceding clip.)
        print(e)
        continue

    # ---- per-frame denoising: process every 3rd paired frame ----
    for paired_frame_idx in trange(0, len(paired_frames), 3):
        try:
            person1_SMPLX_params = person1_SMPLX_params_list[paired_frame_idx]
            person2_SMPLX_params = person2_SMPLX_params_list[paired_frame_idx]

            # Object: pose the template mesh in world coordinates, then keep
            # only the scan points within 0.1 of the posed surface.
            ori_mesh = object_data["mesh"]
            obj2world = object_data["obj2world"][paired_frame_idx]
            world_vertices = ori_mesh.vertices @ obj2world[:3, :3].T + obj2world[:3, 3]  # object coord -> world coord
            obj_mesh = trimesh.Trimesh(vertices=world_vertices, faces=ori_mesh.faces)
            obj_pc, obj_valid = _denoise_pc(
                os.path.join(data_dir, "segmented_pc", f"object_{paired_frame_idx}.ply"), obj_mesh.vertices)

            # Persons: filter each scan against the person's posed SMPL-X surface.
            mesh1 = _posed_smplx_mesh(smplx_model, person1_SMPLX_params, expression)
            pc1, valid1 = _denoise_pc(
                os.path.join(data_dir, "segmented_pc", f"person1_{paired_frame_idx}.ply"), mesh1.vertices)

            mesh2 = _posed_smplx_mesh(smplx_model, person2_SMPLX_params, expression)
            pc2, valid2 = _denoise_pc(
                os.path.join(data_dir, "segmented_pc", f"person2_{paired_frame_idx}.ply"), mesh2.vertices)

            # Require enough surviving points on every entity; otherwise the
            # frame's segmentation is too noisy to keep.
            if obj_valid.sum() < 1000 or valid1.sum() < 1000 or valid2.sum() < 1000:
                print("skipping...")
                continue
            o3d.io.write_point_cloud(os.path.join(data_dir, "denoised_segmented_pc", f"person1_{paired_frame_idx}.ply"), pc1)
            o3d.io.write_point_cloud(os.path.join(data_dir, "denoised_segmented_pc", f"person2_{paired_frame_idx}.ply"), pc2)
            o3d.io.write_point_cloud(os.path.join(data_dir, "denoised_segmented_pc", f"object_{paired_frame_idx}.ply"), obj_pc)
        except Exception as e:
            # Per-frame failure (missing .ply, bad pose, ...): report and keep going.
            print(e)