import os
from os.path import join, isfile
import numpy as np
import torch
from CORE4D_utils.load_data import load_HOI_data_aligned_by_objposition
import trimesh


def preprocess_gt_info_smplx(smplx_motion, device="cuda:0"):
    """Convert raw SMPL-X motion data into adjusted joint positions/orientations.

    Args:
        smplx_motion: dict with numpy arrays
            "joint_positions"        -- per-frame joint positions, shape (N, J, 3)
            "joint_global_rotations" -- per-frame global joint rotations, shape (N, J, 3, 3)
            (shapes assumed from the indexing below -- TODO confirm against loader)
        device: torch device string the tensors are moved to.

    Returns:
        (N_frame, joint positions tensor, joint orientations tensor); positions
        are modified in place after the orientation change of basis.
    """
    joint_pos = torch.from_numpy(smplx_motion["joint_positions"]).to(device)
    n_frames = joint_pos.shape[0]

    # Change of basis from the H1 convention into the SMPL-X frame, applied on
    # the right of every per-joint global rotation. Plain matmul broadcasts the
    # (3, 3) matrix over the leading (frame, joint) dims, matching the original
    # einsum 'bcij,bcjk->bcik' with a (1, 1, 3, 3) second operand.
    h1_to_smplx = torch.tensor(
        [[0, 1, 0], [0, 0, 1], [1, 0, 0]], dtype=torch.float32
    ).to(device)
    joint_rot = torch.matmul(
        torch.from_numpy(smplx_motion["joint_global_rotations"]).to(device),
        h1_to_smplx,
    )

    # Post-hoc position corrections, expressed as (target joint, orientation
    # joint, local offset) triples:
    #   20/21 <- 18/19: shift wrists to the palms (mirrored +-0.10 along y)
    #   7/8   <- 4/5:   drop 0.05 along z to remove the shoes' height
    corrections = (
        (20, 18, (0.0, 0.10, 0.0)),
        (21, 19, (0.0, -0.10, 0.0)),
        (7, 4, (0.0, 0.0, -0.05)),
        (8, 5, (0.0, 0.0, -0.05)),
    )
    for target_j, orient_j, local_offset in corrections:
        offset = torch.tensor(local_offset, dtype=torch.float32).to(device)
        # (3,) @ (N, 3, 3) row-vector product against the transposed rotations,
        # i.e. the offset expressed in the orientation joint's world frame.
        joint_pos[:, target_j] += torch.matmul(
            offset, joint_rot[:, orient_j].permute(0, 2, 1)
        )

    return n_frames, joint_pos, joint_rot


if __name__ == "__main__":

    ###################################################################################################################
    sequence_root = "/home/liuyun/Humanoid_IL_Benchmark/Kinematic_Data/CORE4D_singleperson_touchpoint_data"
    save_root = "./CORE4D_touch_point_new"
    old_save_root = "./CORE4D_touch_point"
    device = "cuda:0"
    ###################################################################################################################

    smplx_model_dir = "./SAMP_utils/models"

    # Enumerate every (object, sequence) pair under sequence_root, sorted by
    # object name so runs are deterministic.
    object_names = sorted(os.listdir(sequence_root))
    sequence_infos = []
    for object_name in object_names:
        for seq_name in os.listdir(join(sequence_root, object_name)):
            sequence_infos.append([object_name, seq_name])

    for obj_name, seq_name in sequence_infos:
        print("processing {} ...".format(seq_name))
        sequence_dir = join(sequence_root, obj_name, seq_name)

        # Only augment sequences that were already processed into the old
        # result directory; everything else is skipped.
        old_result_path = join(old_save_root, obj_name, seq_name, "h1_kinematic_motions", seq_name + "_data.npz")
        if not isfile(old_result_path):
            print("skip!!!!!!")
            continue

        try:
            human_motion, obj_posed_mesh = load_HOI_data_aligned_by_objposition(sequence_dir, "person", smplx_model_dir, start_frame=0, end_frame=-1, sampling_rate=1)
        except Exception:
            print("[error] skip {}/{} !!!".format(obj_name, seq_name))
            # BUG FIX: the original fell through here, so a failed load reused
            # the previous iteration's human_motion/obj_posed_mesh (or crashed
            # with NameError on the first sequence). Skip the sequence instead.
            continue

        # Old results were saved positionally via np.savez, hence the "arr_0" key.
        results = np.load(old_result_path, allow_pickle=True)["arr_0"].item()
        N_frame, gt_joint_positions, gt_joint_orientations = preprocess_gt_info_smplx(human_motion, device=device)
        # Final-frame positions of joints 20 and 21 (the moved wrist/palm
        # joints -- see preprocess_gt_info_smplx) become the task targets.
        task_target_hand_positions = gt_joint_positions[-1, 20:22].detach().cpu().numpy()
        results["task_target_hand_positions"] = task_target_hand_positions
        os.makedirs(join(save_root, obj_name, seq_name, "h1_kinematic_motions"), exist_ok=True)
        obj_posed_mesh.export(join(save_root, obj_name, seq_name, "scene_mesh.obj"))
        np.savez(join(save_root, obj_name, seq_name, "h1_kinematic_motions", seq_name + "_data.npz"), results)