import os
from os.path import join, isfile, dirname, basename
import sys
sys.path.append("/home/liuyun/HHO-dataset/data_processing/Tink")
sys.path.append("/home/liuyun/HHO-dataset/data_processing/")
sys.path.append("/home/liuyun/codebases/HHO_VAE/")
import argparse
import numpy as np
import pickle
import torch
from torch import nn
import pytorch3d
import pytorch3d.io as IO
import trimesh
from smplx import smplx
from utils.VTS_object import get_obj_info
from utils.load_smplx_params import load_multiperson_smplx_params
from utils.contact import compute_contact_and_closest_point
import open3d as o3d
from copy import deepcopy

from optimization.bvh2smplx import Simple_SMPLX, create_SMPLX_model

def find_closest_point(p, vert):
    """
    Return the index of the point in ``vert`` nearest to ``p`` (Euclidean).

    p: a point, shape = (3,)
    vert: a point cloud, shape = (N, 3)

    return: int index into ``vert`` of the closest point (first index on ties)

    Note: only the argmin is needed, and sqrt is strictly monotone, so the
    square root of the original implementation is skipped — same result,
    one fewer pass over the N distances.
    """
    sq_dist = ((vert - p.reshape(1, 3)) ** 2).sum(axis=-1)  # shape = (N,)
    return sq_dist.argmin()

def compute_contact_info(human_params, smplx_model, idx, obj_vertices, threshould=0.05, device="cuda:0"):
    """
    Compute per-vertex contact between one SMPL-X human frame and an object.

    [input]
    * human_params: dict of SMPLX params (tensors indexed by frame)
    * smplx_model: a callable SMPL-X model returning an object with .vertices
    * idx: frame index
    * obj_vertices: torch.float32, shape = (M, 3)
    * threshould: contact distance threshold (metres)
    * device: torch device the params are moved to

    [return]
    * contact: torch.bool, shape = (N)
    * dist: torch.float32, shape = (N)
    * closest_point: torch.int64, shape = (N)
    where N is the number of human SMPL-X mesh vertices.
    """
    # Slice out the single frame `idx` for every parameter the model needs.
    param_keys = ("betas", "expression", "global_orient", "transl",
                  "body_pose", "left_hand_pose", "right_hand_pose")
    frame_params = {
        key: human_params[key][idx:idx + 1].detach().to(device)
        for key in param_keys
    }
    output = smplx_model(return_verts=True, **frame_params)
    human_vertices = output.vertices[0]  # posed human mesh vertices for this frame
    return compute_contact_and_closest_point(human_vertices, obj_vertices, threshould=threshould)

# Vertex indices of the five fingertips of each hand on the SMPL-X body mesh.
# Used below (L203-L209) to slice per-fingertip contact/dist/closest-point.
# NOTE(review): these look like the standard SMPL-X fingertip vertex ids used
# by GRAB-style contact code — confirm against the SMPL-X model version in use.
HAND_VERT_IDS = {
    'lthumb':		5361,
    'lindex':		4933,
    'lmiddle':		5058,
    'lring':		5169,
    'lpinky':		5286,
    'rthumb':		8079,
    'rindex':		7669,
    'rmiddle':		7794,
    'rring':		7905,
    'rpinky':		8022,
}

# Left-hand fingertip vertex ids only (subset of HAND_VERT_IDS).
LEFT_VERT_IDS = {
    'lthumb':		5361,
    'lindex':		4933,
    'lmiddle':		5058,
    'lring':		5169,
    'lpinky':		5286,
}

# Right-hand fingertip vertex ids only (subset of HAND_VERT_IDS).
RIGHT_VERT_IDS = {
    'rthumb':		8079,
    'rindex':		7669,
    'rmiddle':		7794,
    'rring':		7905,
    'rpinky':		8022,
}

def parse_args():
    """Build and parse the command-line options for this script."""
    parser = argparse.ArgumentParser()
    ############# data ########################
    # Alternative sequences kept for reference:
    # parser.add_argument('--data_dir', type=str, default="/share/datasets/HHO_dataset/data/20230807_2/002")  # carrying a table
    # parser.add_argument('--data_dir', type=str, default="/share/datasets/HHO_dataset/data/20230807_2/004")  # rotating a table
    parser.add_argument('--data_dir', type=str, default="/share/datasets/HHO_dataset/data/20230807_1/017")  # carrying a stick
    # parser.add_argument('--data_dir', type=str, default="/share/datasets/HHO_dataset/data/20230805_1/002")  # rotating a chair
    parser.add_argument('--start_frame', '-s', type=int, default=0)
    parser.add_argument('--end_frame', '-e', type=int, default=300)
    parser.add_argument('--save_filename', type=str, default="compare_result.mp4")
    parser.add_argument('--device', type=str, default="cuda:0")
    return parser.parse_args()

if __name__ == '__main__':

    #########################################################################################
    # Pipeline: load a HHO sequence + its object mesh, compute per-frame human/object
    # contact, reduce it to the 10 fingertip vertices, then interactively sample contact
    # points from the object's precomputed "sorted_pool" and visualise them as a colored
    # point cloud ("contact_sample.ply"), scored by the user.
    # NOTE(review): the hard-coded paths below override the --data_dir CLI option; the
    # parsed `args` are effectively unused in this script body.
    obj_dataset_dir = "/data3/datasets/HHO_object_dataset_final/"
    hho_dataset_dir = "/share/datasets/hhodataset/VTS/"
    cfg = {
            "retarget_person1": True,
            "retarget_person2": True,
    }
    args = parse_args()

    object_contact_pool = {}


    # data_dir = "/share/datasets/hhodataset/VTS/20231030/092"
    data_dir = "/share/datasets/hhodataset/VTS/20231030/036/"

    # One-off preprocessing (kept for reference): for every object in the dataset,
    # remap each contact pool entry to the single mesh vertex closest to the pool's
    # mean point, and save it as "sorted_pool.npy" (loaded further below).
    # for obj_cat_dir in os.listdir(obj_dataset_dir):
    #     if isfile(join(obj_dataset_dir, obj_cat_dir)):
    #         continue
    #     for obj_name in os.listdir(join(obj_dataset_dir, obj_cat_dir)):
    #         obj_data_path = join(obj_dataset_dir, obj_cat_dir, obj_name)
    #         obj_name = obj_name.lower()

    #         contact_pool_file = join(obj_data_path, "contact_pool.npy")
    #         mesh_file = join(obj_data_path, f"{obj_name}_m.obj")
    #         if isfile(contact_pool_file) and isfile(mesh_file):
    #             obj_mesh = trimesh.load_mesh(mesh_file)
    #             obj_vert, obj_face = obj_mesh.vertices, obj_mesh.faces
    #             contact_pool = np.load(contact_pool_file, allow_pickle=True).item()
    #             new_cp = {  # record which OakInk closest points each person's hand touches over the whole sequence
    #                 "person1": {"left_hand": [], "right_hand": [],},
    #                 "person2": {"left_hand": [], "right_hand": [],},
    #             }
    #             for person in contact_pool:
    #                 for hand in contact_pool[person]:
    #                     sorted_pool = sorted(contact_pool[person][hand], key=lambda x: obj_vert[x].mean(axis=0)[0])
    #                     for p in sorted_pool:
    #                         mean_p = obj_vert[p].mean(axis=0)
    #                         new_index = find_closest_point(mean_p, obj_vert)
    #                         new_cp[person][hand].append(new_index)
    #             print("down", obj_name)
    #             np.save(join(obj_data_path, "sorted_pool.npy"), new_cp)

    # Resolve the object used by this sequence and the path to its mesh file.
    obj_name, obj_data_path = get_obj_info(data_dir, obj_dataset_dir)
    print(obj_name, obj_data_path)



    obj_name = obj_name.lower()
    start_frame = 0
    end_frame = 300
    device = "cuda:0"

    # Clamp end_frame to the number of object poses available.
    # NOTE(review): the file is loaded twice here; loading once and reusing the
    # array would avoid the redundant disk read.
    if len(np.load(join(data_dir, 'aligned_objposes.npy'), allow_pickle=True)) < end_frame:
        end_frame = len(np.load(join(data_dir, 'aligned_objposes.npy'), allow_pickle=True))
        print("Warning: end_frame is larger than the length of object_result, set end_frame to {}".format(str(end_frame)))

    # NOTE(review): this try/except re-raises immediately and is a no-op wrapper.
    try:
        multiperson_SMPLX_params = load_multiperson_smplx_params(join(data_dir, "SMPLX_fitting"), start_frame=start_frame, end_frame=end_frame, device=device)
    except Exception as e:
        raise e

    obj_mesh = trimesh.load_mesh(obj_data_path)
    obj_vert, obj_face = obj_mesh.vertices, obj_mesh.faces

    # Apply each frame's rigid pose to the canonical object vertices.
    # Poses are indexed [:, :3, :3] / [:, :3, 3], i.e. assumed (N_frames, 4, 4)
    # (or at least (N_frames, 3, 4)) rigid transforms — TODO confirm on disk format.
    object_dir = join(data_dir, 'aligned_objposes.npy')
    origin_pose = np.load(object_dir, allow_pickle=True)[start_frame:end_frame]
    origin_vert_seq = (np.array(origin_pose[:, :3, :3] @ obj_vert.T)).transpose(0, 2, 1) + np.expand_dims(origin_pose[:, :3, 3], axis=1)
    origin_vert_seq = torch.from_numpy(origin_vert_seq).to(device)  # (N_frames, M, 3)

    # Per-frame contact info for each person against the posed object.
    origin_contact_info = {
        "person1": {"contact": [], "dist": [], "closest_point": []},
        "person2": {"contact": [], "dist": [], "closest_point": []},
    }
    contact_threshould=0.05
    # contact_threshould = 0.01
    smplx_model = create_SMPLX_model(use_pca=True, num_pca_comps=12, batch_size=1, device=device)

    for idx in range(0, end_frame - start_frame):
        # person1 to original obj

        contact, dist, closest_point = compute_contact_info(multiperson_SMPLX_params["person1"], smplx_model, idx, origin_vert_seq[idx], threshould=contact_threshould, device=device)
        origin_contact_info["person1"]["contact"].append(contact.detach().cpu().numpy())
        origin_contact_info["person1"]["dist"].append(dist.detach().cpu().numpy())
        origin_contact_info["person1"]["closest_point"].append(closest_point.detach().cpu().numpy())

        # person2 to original obj
        contact, dist, closest_point = compute_contact_info(multiperson_SMPLX_params["person2"], smplx_model, idx, origin_vert_seq[idx], threshould=contact_threshould, device=device)
        origin_contact_info["person2"]["contact"].append(contact.detach().cpu().numpy())
        origin_contact_info["person2"]["dist"].append(dist.detach().cpu().numpy())
        origin_contact_info["person2"]["closest_point"].append(closest_point.detach().cpu().numpy())


    # Stack the per-frame lists into (N_frames, N_verts) tensors on `device`.
    # NOTE(review): torch.tensor on a list of numpy arrays copies element-wise;
    # np.stack + torch.from_numpy would be faster, behavior unchanged.
    for person in origin_contact_info:
        origin_contact_info[person]["contact"] = torch.tensor(origin_contact_info[person]["contact"], dtype=torch.bool).to(device)
        origin_contact_info[person]["dist"] = torch.tensor(origin_contact_info[person]["dist"], dtype=torch.float32).to(device)
        origin_contact_info[person]["closest_point"] = torch.tensor(origin_contact_info[person]["closest_point"], dtype=torch.int64).to(device)
    print("finish preparing contact areas !!!")
    print(origin_contact_info["person1"]["contact"].shape, origin_contact_info["person1"]["dist"].shape, origin_contact_info["person1"]["closest_point"].shape)


    contact_points = {
            "person1": {"dist": [], "closest_point": [], "contact": []},
            "person2": {"dist": [], "closest_point": [], "contact": []},
        }

    hand_vertex_indices = list(HAND_VERT_IDS.values())
    assert len(hand_vertex_indices) == 10

    # Reduce full-mesh contact info to the 10 fingertip vertices
    # (columns 0-4 = left hand, 5-9 = right hand; dict-insertion order of HAND_VERT_IDS).
    for person in contact_points:
        contact_points[person]["dist"] = origin_contact_info[person]["dist"][:, list(HAND_VERT_IDS.values())].detach().cpu().numpy() # N x 10
        contact_points[person]["closest_point"] = origin_contact_info[person]["closest_point"][:, list(HAND_VERT_IDS.values())].detach().cpu().numpy() # N x 10
        contact_points[person]["contact"] = origin_contact_info[person]["contact"][:, list(HAND_VERT_IDS.values())].detach().cpu().numpy()

    # Load the precomputed per-object pool of candidate contact vertex indices.
    # contact_pool = np.load(join(os.path.abspath(join(obj_data_path, "..")), "contact_pool.npy"), allow_pickle=True).item()
    contact_pool = np.load(join(os.path.abspath(join(obj_data_path, "..")), "sorted_pool.npy"), allow_pickle=True).item()
    person1_left, person1_right = contact_pool["person1"]["left_hand"], contact_pool["person1"]["right_hand"]
    person2_left, person2_right = contact_pool["person2"]["left_hand"], contact_pool["person2"]["right_hand"]

    # NOTE(review): .copy() is shallow — the inner per-person dicts and arrays are
    # shared with contact_points; later in-place writes to contact_info_sample go
    # through a deepcopy below, but new_contact_info_from_oakink itself aliases
    # contact_points. Confirm this aliasing is intended.
    new_contact_info_from_oakink = contact_points.copy()
    contact_point_candidates = {  # for each person's each hand: which OakInk closest points it contacts over the sequence
        "person1": {"left_hand": [], "right_hand": [],},
        "person2": {"left_hand": [], "right_hand": [],},
    }

    for person in new_contact_info_from_oakink:
        left_hand_contact_flag = new_contact_info_from_oakink[person]["contact"][:, :5].sum(axis=-1) > 0  # shape = (N_frame,); the whole hand counts as in contact if any fingertip touches the object
        right_hand_contact_flag = new_contact_info_from_oakink[person]["contact"][:, 5:].sum(axis=-1) > 0  # shape = (N_frame,); the whole hand counts as in contact if any fingertip touches the object
        for idx in range(start_frame, end_frame):
            if left_hand_contact_flag[idx]:
                contact_point_candidates[person]["left_hand"].append(new_contact_info_from_oakink[person]["closest_point"][idx, :5])
            if right_hand_contact_flag[idx]:
                contact_point_candidates[person]["right_hand"].append(new_contact_info_from_oakink[person]["closest_point"][idx, 5:])

    # Build a KD-tree over the canonical object vertices for radius queries below.
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(np.asarray(obj_vert))
    pcd_tree = o3d.geometry.KDTreeFlann(pcd)

    print(len(person1_left), len(person1_right), len(person2_left), len(person2_right))


    # Randomly pick one pool entry per person/hand.
    # NOTE(review): np.random.randint raises ValueError if a pool is empty.
    person1_left_sample = person1_left[np.random.randint(len(person1_left))]
    person1_right_sample = person1_right[np.random.randint(len(person1_right))]
    person2_left_sample = person2_left[np.random.randint(len(person2_left))]
    person2_right_sample = person2_right[np.random.randint(len(person2_right))]
    contact_pool_sample = {
        "person1": {
            "left_hand": person1_left_sample,
            "right_hand": person1_right_sample
        },
        "person2": {
            "left_hand": person2_left_sample,
            "right_hand": person2_right_sample
        }}

    # Initialise each hand's closest-point columns to a randomly chosen candidate's
    # nearest mesh vertex (broadcast across all frames and all 5 fingertip columns).
    contact_info_sample = deepcopy(new_contact_info_from_oakink)
    for person in contact_point_candidates:
            for hand_name in contact_point_candidates[person]:
                N_candidate = len(contact_point_candidates[person][hand_name])
                if N_candidate == 0:
                    continue
                k = np.random.randint(0, N_candidate)
                candidate = contact_point_candidates[person][hand_name][k]
                init_point = obj_vert[candidate].mean(axis=0)
                init_index = find_closest_point(init_point, obj_vert)
                if hand_name == "left_hand":
                    contact_info_sample[person]["closest_point"][:, :5] = init_index
                else:
                    contact_info_sample[person]["closest_point"][:, 5:] = init_index

    # Interactive loop: nudge each hand's contact point along a random direction
    # scaled by the user-provided score, write the colored cloud, and ask again.
    # NOTE(review): this loop never terminates on its own (no break).
    v = 1.0
    score = 0.0
    direction = np.random.randint(-1, 2, 3)
    print(direction)
    while True:

        pcd.paint_uniform_color([0, 0, 1])
        color = np.asarray(pcd.colors)
        temp_color = color.copy()
        pace = np.random.uniform(0, 0.05, 3)
        contact_move = pace * direction * v * score
        for person in contact_info_sample:
            for hand_name in ["left_hand", "right_hand"]:
                N_candidate = len(contact_point_candidates[person][hand_name])
                if N_candidate == 0:
                    continue
                if hand_name == "left_hand":
                    # Move the current left-hand contact point and snap it back onto
                    # this hand's contact pool; mark a 1cm neighborhood red.
                    vert_idx = contact_info_sample[person]["closest_point"][0, 0]
                    vert_p = obj_vert[vert_idx]
                    p = vert_p + contact_move
                    contact_vert = obj_vert[contact_pool[person][hand_name]]
                    # print(contact_vert.shape)
                    new_idx = find_closest_point(p, contact_vert)
                    [k, index, _] = pcd_tree.search_radius_vector_3d(obj_vert[contact_pool[person][hand_name][new_idx]], 0.01)
                    temp_color[index] = [1, 0, 0]
                    contact_info_sample[person]["closest_point"][:, :5] = contact_pool[person][hand_name][new_idx]
                elif hand_name == "right_hand":
                    # Same for the right hand, marked green.
                    vert_idx = contact_info_sample[person]["closest_point"][0, 5]
                    vert_p = obj_vert[vert_idx]
                    p = vert_p + contact_move
                    contact_vert = obj_vert[contact_pool[person][hand_name]]
                    new_idx = find_closest_point(p, contact_vert)
                    [k, index, _] = pcd_tree.search_radius_vector_3d(obj_vert[contact_pool[person][hand_name][new_idx]], 0.01)
                    temp_color[index] = [0, 1, 0]
                    contact_info_sample[person]["closest_point"][:, 5:] = contact_pool[person][hand_name][new_idx]


        pcd.colors = o3d.utility.Vector3dVector(temp_color)
        o3d.io.write_point_cloud(join(data_dir, "contact_sample.ply"), pcd)
        # SECURITY(review): eval() on raw user input executes arbitrary code;
        # float(input(...)) would be the safe equivalent for numeric scores.
        score = eval(input("contact score: "))