import os
from os.path import join, isfile, dirname, basename
import sys
sys.path.append("/home/liuyun/HHO-dataset/data_processing/Tink")
sys.path.append("/home/liuyun/HHO-dataset/data_processing/")
sys.path.append("/home/liuyun/codebases/HHO_VAE/")
import argparse
import numpy as np
import pickle
import torch
from torch import nn
import pytorch3d
import pytorch3d.io as IO
import trimesh
from smplx import smplx
from utils.VTS_object import get_obj_info
from utils.load_smplx_params import load_multiperson_smplx_params
from utils.contact import compute_contact_and_closest_point

from optimization.bvh2smplx import Simple_SMPLX, create_SMPLX_model



# SMPL-X fingertip vertex indices, one per finger.
# NOTE(review): values presumably index the standard SMPL-X template mesh
# (10475 vertices) -- confirm against the model used by create_SMPLX_model.
LEFT_VERT_IDS = {
    'lthumb': 5361,
    'lindex': 4933,
    'lmiddle': 5058,
    'lring': 5169,
    'lpinky': 5286,
}

RIGHT_VERT_IDS = {
    'rthumb': 8079,
    'rindex': 7669,
    'rmiddle': 7794,
    'rring': 7905,
    'rpinky': 8022,
}

# All ten fingertips: the five left-hand entries followed by the five
# right-hand ones. Insertion order matters -- downstream code takes
# list(HAND_VERT_IDS.values()) and slices columns [:5] / [5:].
HAND_VERT_IDS = {**LEFT_VERT_IDS, **RIGHT_VERT_IDS}

def compute_contact_info(human_params, smplx_model, idx, obj_vertices, threshould=0.05, device="cuda:0"):
    """Run SMPL-X for one frame and compute human/object contact.

    [input]
    * human_params: dict of batched SMPL-X parameter tensors
    * smplx_model: SMPL-X model (batch_size 1)
    * idx: frame index into human_params
    * obj_vertices: torch.float32, shape = (M, 3)
    * threshould: contact distance threshold
    * device: torch device string

    human SMPL-X mesh: shape = (N, 3)

    [return]
    * contact: torch.bool, shape = (N)
    * dist: torch.float32, shape = (N)
    * closest_point: torch.int64, shape = (N)
    """
    # Slice out frame `idx` of every parameter, detached and moved to `device`.
    param_names = (
        "betas", "expression", "global_orient", "transl",
        "body_pose", "left_hand_pose", "right_hand_pose",
    )
    smplx_kwargs = {
        name: human_params[name][idx:idx + 1].detach().to(device)
        for name in param_names
    }
    model_output = smplx_model(return_verts=True, **smplx_kwargs)
    human_vertices = model_output.vertices[0]  # posed human mesh vertices, (N, 3)
    return compute_contact_and_closest_point(human_vertices, obj_vertices, threshould=threshould)


def prepare_contact_pool(data_dir, origin_mesh, start_frame, end_frame, device="cuda:0"):
    """Compute per-frame fingertip/object contact candidates for one sequence.

    [input]
    * data_dir: sequence directory containing "SMPLX_fitting" and
      "aligned_objposes.npy"
    * origin_mesh: trimesh mesh of the object in its canonical pose
    * start_frame, end_frame: half-open frame range [start_frame, end_frame)
    * device: torch device string

    [return]
    * contact_point_candidates: dict
      {person: {"left_hand"/"right_hand": [array of the 5 closest
      object-vertex ids, one list entry per frame in which that hand is in
      contact]}}, or None when the SMPL-X parameters cannot be loaded.
    """
    use_pca, num_pca_params = True, 12

    # (1) load ground-truth human poses
    try:
        multiperson_SMPLX_params = load_multiperson_smplx_params(
            join(data_dir, "SMPLX_fitting"),
            start_frame=start_frame, end_frame=end_frame, device=device)
    except Exception as e:
        # Report the failure instead of returning None silently.
        print("Failed to load SMPL-X params from {}: {}".format(data_dir, e))
        return None

    print(multiperson_SMPLX_params["person1"]["body_pose"].shape)
    print(multiperson_SMPLX_params.keys(), multiperson_SMPLX_params["person2"].keys(), multiperson_SMPLX_params["person2"]["body_pose"].shape, multiperson_SMPLX_params["person1"]["joints"].shape)

    # (2) transform the object mesh into world space for every frame
    origin_vert = origin_mesh.vertices
    object_dir = join(data_dir, 'aligned_objposes.npy')
    # Per-frame rigid poses; assumed (F, 4, 4) homogeneous transforms -- TODO confirm.
    origin_pose = np.load(object_dir, allow_pickle=True)[start_frame:end_frame]
    origin_vert_seq = (np.array(origin_pose[:, :3, :3] @ origin_vert.T)).transpose(0, 2, 1) + np.expand_dims(origin_pose[:, :3, 3], axis=1)
    origin_vert_seq = torch.from_numpy(origin_vert_seq).to(device)  # (F, M, 3) object vertices per frame

    ######################################################################
    # (3) per-frame human/object contact for both persons
    print("start preparing contact areas ...")
    origin_contact_info = {
        "person1": {"contact": [], "dist": [], "closest_point": []},
        "person2": {"contact": [], "dist": [], "closest_point": []},
    }
    contact_threshould = 0.05
    smplx_model = create_SMPLX_model(use_pca=use_pca, num_pca_comps=num_pca_params, batch_size=1, device=device)

    num_frames = end_frame - start_frame
    for idx in range(num_frames):
        for person in origin_contact_info:  # person1 to obj, then person2 to obj
            contact, dist, closest_point = compute_contact_info(
                multiperson_SMPLX_params[person], smplx_model, idx,
                origin_vert_seq[idx], threshould=contact_threshould, device=device)
            origin_contact_info[person]["contact"].append(contact.detach().cpu().numpy())
            origin_contact_info[person]["dist"].append(dist.detach().cpu().numpy())
            origin_contact_info[person]["closest_point"].append(closest_point.detach().cpu().numpy())

    for person in origin_contact_info:
        origin_contact_info[person]["contact"] = torch.tensor(origin_contact_info[person]["contact"], dtype=torch.bool).to(device)
        origin_contact_info[person]["dist"] = torch.tensor(origin_contact_info[person]["dist"], dtype=torch.float32).to(device)
        origin_contact_info[person]["closest_point"] = torch.tensor(origin_contact_info[person]["closest_point"], dtype=torch.int64).to(device)
    print("finish preparing contact areas !!!")

    print(origin_contact_info["person1"]["contact"].shape, origin_contact_info["person1"]["dist"].shape, origin_contact_info["person1"]["closest_point"].shape)

    # (4) keep only the ten fingertip vertices (left-hand five first, then right)
    fingertip_ids = list(HAND_VERT_IDS.values())
    contact_points = {
        "person1": {"dist": [], "closest_point": [], "contact": []},
        "person2": {"dist": [], "closest_point": [], "contact": []},
    }
    for person in contact_points:
        contact_points[person]["dist"] = origin_contact_info[person]["dist"][:, fingertip_ids].detach().cpu().numpy()  # (F, 10)
        contact_points[person]["closest_point"] = origin_contact_info[person]["closest_point"][:, fingertip_ids].detach().cpu().numpy()  # (F, 10)
        contact_points[person]["contact"] = origin_contact_info[person]["contact"][:, fingertip_ids].detach().cpu().numpy()  # (F, 10)

    # (5) quasi-static assumption: a hand counts as "in contact" in a frame
    # when any of its five fingertips touches the object; for each such frame
    # record the five closest object-vertex ids of that hand.
    contact_point_candidates = {
        "person1": {"left_hand": [], "right_hand": []},
        "person2": {"left_hand": [], "right_hand": []},
    }
    for person in contact_points:
        # Columns [:5] are the left-hand fingertips, [5:] the right-hand ones
        # (HAND_VERT_IDS insertion order).
        left_hand_contact_flag = contact_points[person]["contact"][:, :5].sum(axis=-1) > 0   # (F,)
        right_hand_contact_flag = contact_points[person]["contact"][:, 5:].sum(axis=-1) > 0  # (F,)
        # BUGFIX: iterate local frame indices [0, num_frames). The original
        # iterated range(start_frame, end_frame), which mis-indexes (or
        # overruns) these length-F arrays whenever start_frame > 0.
        for idx in range(num_frames):
            if left_hand_contact_flag[idx]:
                contact_point_candidates[person]["left_hand"].append(contact_points[person]["closest_point"][idx, :5])
            if right_hand_contact_flag[idx]:
                contact_point_candidates[person]["right_hand"].append(contact_points[person]["closest_point"][idx, 5:])

    for person in contact_point_candidates:
        for hand_name in contact_point_candidates[person]:
            print("[contact_point_candidates] {}, {}: candidate number = {}".format(person, hand_name, len(contact_point_candidates[person][hand_name])))

    return contact_point_candidates


def parse_args():
    """Build the command-line interface for this script and parse sys.argv."""
    arg_parser = argparse.ArgumentParser()
    ############# data ########################
    # Alternative sequences kept for reference:
    #   /share/datasets/HHO_dataset/data/20230807_2/002  (carrying a table)
    #   /share/datasets/HHO_dataset/data/20230807_2/004  (rotating a table)
    #   /share/datasets/HHO_dataset/data/20230805_1/002  (rotating a chair)
    arg_parser.add_argument('--data_dir', type=str,
                            default="/share/datasets/HHO_dataset/data/20230807_1/017")  # carrying a stick
    arg_parser.add_argument('--start_frame', '-s', type=int, default=0)
    arg_parser.add_argument('--end_frame', '-e', type=int, default=300)
    arg_parser.add_argument('--save_filename', type=str, default="compare_result.mp4")
    arg_parser.add_argument('--device', type=str, default="cuda:0")
    return arg_parser.parse_args()

if __name__ == '__main__':

    #########################################################################################
    obj_dataset_dir = "/data3/datasets/HHO_object_dataset_final/"
    hho_dataset_dir = "/share/datasets/hhodataset/VTS/"
    args = parse_args()
    device = args.device

    # Contact candidates accumulated across sequences, keyed by the
    # lower-cased object name.
    object_contact_pool = {}

    # `group_name` instead of `dir` -- the original shadowed the builtin.
    for group_name in os.listdir(hho_dataset_dir):
        group_dir = join(hho_dataset_dir, group_name)
        if not os.path.isdir(group_dir):
            print("skipping...")
            continue

        for seq_name in os.listdir(group_dir):
            data_dir = join(group_dir, seq_name)
            obj_pose_path = join(data_dir, 'aligned_objposes.npy')
            if not isfile(obj_pose_path):
                print("skipping...")
                continue

            obj_name, obj_data_path = get_obj_info(data_dir, obj_dataset_dir)
            print(obj_name, obj_data_path)
            obj_name = obj_name.lower()

            start_frame, end_frame = args.start_frame, args.end_frame
            # Clamp end_frame to the sequence length (poses loaded once,
            # not twice as in the original).
            num_obj_frames = len(np.load(obj_pose_path, allow_pickle=True))
            if num_obj_frames < end_frame:
                end_frame = num_obj_frames
                print("Warning: end_frame is larger than the length of object_result, set end_frame to {}".format(str(end_frame)))

            print(data_dir, "[", start_frame, end_frame, ")")
            print(obj_name)
            try:
                origin_mesh = trimesh.load_mesh(obj_data_path)
            except Exception as e:
                print("not file ", obj_data_path)
                continue

            contact_pool = prepare_contact_pool(data_dir, origin_mesh, start_frame, end_frame, device=device)
            if contact_pool is None:
                print("Error: contact_pool is None")
                continue

            # Merge this sequence's candidates into the per-object pool.
            if obj_name not in object_contact_pool:
                object_contact_pool[obj_name] = contact_pool
            else:
                for person in contact_pool:
                    for hand_name in contact_pool[person]:
                        object_contact_pool[obj_name][person][hand_name].extend(contact_pool[person][hand_name])

            # Save next to the object's data file. BUGFIX: the original
            # printed a different path (abspath of the data file itself)
            # than the one it actually saved to.
            save_path = join(dirname(os.path.abspath(obj_data_path)), "contact_pool.npy")
            np.save(save_path, object_contact_pool[obj_name])
            print("save contact_pool.npy to ", save_path)