import os
from os.path import join
import numpy as np
import torch
from CORE4D_utils.load_data import load_HOI_data_aligned_by_objpose
from CORE4D_utils.urdf_and_mesh import create_and_save_urdf, create_and_save_mesh
import trimesh
from transforms3d.axangles import mat2axangle
from scipy.spatial.transform  import Rotation as sRot
from h1_kinematics import H1_Motion_Model, load_urdf, get_h1_link_names, forward_kinematics, save_predicted_h1_motion_with_object_info
from utils.rotation import rot_decompose
import open3d as o3d


def get_joint_position_correspondence():
    """
    Return the joint-position correspondence table between H1 and SMPLX.

    Each row reads
    [H1 joint index, matching SMPLX joint index,
     parent of the H1 joint on the abstract skeleton,
     parent of the SMPLX joint on the abstract skeleton];
    a parent index of -1 marks a root joint with no bone above it.
    """
    table = [
        [0, 0, -1, -1],
        [1, 1, 0, 0],
        [2, 1, 0, 0],
        [4, 4, 2, 1],
        [5, 7, 4, 4],
        [6, 2, 0, 0],
        [7, 2, 0, 0],
        [9, 5, 7, 2],
        [10, 8, 9, 5],
        [11, 0, -1, -1],
        [12, 16, 11, 0],
        [15, 18, 12, 16],
        [16, 17, 11, 0],
        [19, 19, 16, 17],
    ]
    return table


def get_joint_orientation_correspondence():
    """
    Return the joint-orientation correspondence table between H1 and SMPLX.

    Each row is [H1 joint index, matching SMPLX joint index]; only the two
    knee-level joints are constrained by orientation.
    """
    return [
        [4, 4],
        [9, 5],
    ]


def preprocess_gt_info_smplx(smplx_motion, device="cuda:0"):
    """
    Convert raw SMPLX ground-truth motion to torch tensors on *device* and
    nudge a few joint positions so they better match the H1 robot.

    smplx_motion: {"joint_positions": (N, 127, 3),
                   "joint_global_rotations": (N, 22, 3, 3)}, numpy arrays.

    Returns (N_frame, joint positions (N, 127, 3), joint orientations (N, 22, 3, 3)).
    """
    joint_positions = torch.from_numpy(smplx_motion["joint_positions"]).to(device)
    n_frames = joint_positions.shape[0]

    # change of basis mapping each SMPLX joint frame into the H1 axis convention
    axis_swap = torch.tensor([[0, 1, 0], [0, 0, 1], [1, 0, 0]]).to(dtype=torch.float32).to(device)
    joint_orientations = torch.einsum(
        'bcij,bcjk->bcik',
        torch.from_numpy(smplx_motion["joint_global_rotations"]).to(device),
        axis_swap.reshape(1, 1, 3, 3),
    )

    def _shift(joint_idx, orient_idx, local_offset):
        # offset given in the joint's local frame, rotated into world coordinates
        offset = torch.tensor(local_offset).to(dtype=torch.float32).to(device)
        joint_positions[:, joint_idx] += torch.matmul(
            offset, joint_orientations[:, orient_idx].permute(0, 2, 1))

    # move the wrist joints onto the palms
    _shift(20, 18, [0, 0.10, 0.0])
    _shift(21, 19, [0, -0.10, 0.0])
    # remove the shoes' height at the ankles
    _shift(7, 4, [0, 0, -0.05])
    _shift(8, 5, [0, 0, -0.05])

    return n_frames, joint_positions, joint_orientations


def align_bone_length_to_H1(N_frame, gt_joint_positions, gt_joint_orientations):
    """
    Rescale the SMPLX skeleton so every corresponded bone has the same length
    as on the H1 robot, keeping each bone's direction unchanged.

    N_frame: number of frames
    gt_joint_positions: (N_frame, 127, 3) torch tensor of SMPLX joint positions
    gt_joint_orientations: passed through unchanged

    Returns (N_frame, aligned joint positions on the original device, gt_joint_orientations).
    """
    # prepare info
    joint_correspondence = get_joint_position_correspondence()
    # reference H1 link poses; [:, :3, 3] extracts each link's translation
    H1_link_position_example = np.load("./utils/20link_pose_example.npy")[:, :3, 3]  # (20, 3)

    device = gt_joint_positions.device
    gt_joint_positions = gt_joint_positions.detach().cpu()

    aligned_gt_joint_positions = torch.zeros(gt_joint_positions.shape)
    for frame_idx in range(N_frame):
        gt = gt_joint_positions[frame_idx]  # (127, 3)
        delta = torch.zeros(gt.shape)
        # get the bias for each joint: how far the child joint must move so the
        # bone (parent -> child) matches the corresponding H1 bone length
        for joint_corr in joint_correspondence:
            curr_h1, curr_human, parent_h1, parent_human = joint_corr
            if parent_human == -1:
                continue  # root joints have no bone to rescale
            h1_bone_length = ((H1_link_position_example[curr_h1] - H1_link_position_example[parent_h1])**2).sum()**0.5
            curr_human_bone = gt[curr_human] - gt[parent_human]
            # rescale the bone direction to the H1 length; clamp guards against near-zero bones
            delta[curr_human] = h1_bone_length * (curr_human_bone / torch.clamp(curr_human_bone.norm(p=None), 1e-6, None)) - curr_human_bone
        # accumulate the bias down the kinematic chain
        # NOTE(review): relies on the correspondence table listing parents before
        # their children -- confirm if the table ever changes
        for joint_corr in joint_correspondence:
            _, curr_human, _, parent_human = joint_corr
            if parent_human == -1:
                continue
            delta[curr_human] += delta[parent_human]
        # wrists (20/21) are not in the table; carry over the elbow (18/19) offsets
        delta[20] += delta[18]
        delta[21] += delta[19]
        # save
        aligned_gt_joint_positions[frame_idx] = gt + delta

    aligned_gt_joint_positions = aligned_gt_joint_positions.to(dtype=gt_joint_positions.dtype).to(device)
    return N_frame, aligned_gt_joint_positions, gt_joint_orientations


def complete_h1_motion(h1_motion, chain, link_names, device="cuda:0"):
    """
    Attach per-link global poses to an H1 motion dict.

    Runs forward kinematics with the motion's joint angles and root pose, then
    stores the result under h1_motion["link_global_poses"] as a dict mapping
    link name -> (N_frame, 4, 4) numpy array. Returns the same (mutated) dict.
    """
    link_to_world = forward_kinematics(
        chain,
        link_names,
        h1_motion["joint_angles"],
        global_rotation=h1_motion["global_rotations"],
        global_translation=h1_motion["global_translations"],
        device=device,
    )
    h1_motion["link_global_poses"] = {
        name: link_to_world[name].detach().cpu().numpy() for name in link_to_world
    }
    return h1_motion


def optimization_smplx_to_h1(chain, link_names, smplx_motion, object_mesh, object_motion, method_name="normal_optimization", device="cuda:0"):
    """
    Retarget an SMPLX human motion to the H1 robot by gradient-based optimization.

    chain: h1 kinematic chain
    link_names: h1 link names
    smplx_motion: {"joint_positions": (N, 127, 3),"joint_global_rotations": (N, 22, 3, 3)}, all items are numpy
    object_mesh, object_motion: currently unused here (reserved for a contact loss, see TODO below)
    method_name: "normal_optimization", or "omnih2o" to first rescale the SMPLX
        skeleton's bone lengths to match H1

    return: {"joint_angles": (N, 19), "global_rotations": (N, 3) axis-angle,
             "global_translations": (N, 3),
             "link_global_poses": {link name: (N, 4, 4) numpy}}
    """

    N_frame, gt_joint_positions, gt_joint_orientations = preprocess_gt_info_smplx(smplx_motion, device=device)
    if method_name == "omnih2o":
        print("before:", gt_joint_positions[-1, 20:22])
        N_frame, gt_joint_positions, gt_joint_orientations = align_bone_length_to_H1(N_frame, gt_joint_positions, gt_joint_orientations)
        print("after:", gt_joint_positions[-1, 20:22])

    ########################## start optimization #################################
    # h1_motion_model = H1_Motion_Model(batch_size=N_frame, device=device)
    # initialize the root pose from the SMPLX pelvis (joint 0) of each frame
    init_global_translations = gt_joint_positions[:, 0].clone()  # (N, 3)
    init_global_rotations = []
    for i in range(N_frame):
        # rotation matrix -> axis-angle vector for the root orientation
        axis, angle = mat2axangle(gt_joint_orientations[i, 0].detach().cpu().numpy(), unit_thresh=1e-3)
        init_global_rotations.append(axis * angle)
    init_global_rotations = torch.from_numpy(np.float32(init_global_rotations)).to(device)  # (N, 3)
    h1_motion_model = H1_Motion_Model(batch_size=N_frame, init_global_translations=init_global_translations, init_global_rotations=init_global_rotations, device=device)

    optimizer = torch.optim.Adam(h1_motion_model.parameters(), lr=2e-2)
    h1_motion_model.train()

    joint_position_corrs = get_joint_position_correspondence()
    joint_orientation_corrs = get_joint_orientation_correspondence()

    # hand position expressed in the elbow link's local frame
    position_hand_to_elbow = torch.tensor([0.30, 0, 0.0]).to(dtype=torch.float32).to(device)

    for epoch in range(3000):
        h1_motion = h1_motion_model()

        pred_link_to_world_dict = forward_kinematics(chain, link_names, h1_motion["joint_angles"], global_rotation=h1_motion["global_rotations"], global_translation=h1_motion["global_translations"], device=device)

        # joint global position loss; pelvis and ankle links are weighted higher
        joint_global_position_loss = 0
        for joint_corr in joint_position_corrs:
            if ("pelvis" in link_names[joint_corr[0]]) or ("ankle" in link_names[joint_corr[0]]):
                scale = 5.0
            else:
                scale = 1.0
            joint_global_position_loss += scale * ((pred_link_to_world_dict[link_names[joint_corr[0]]][:, :3, 3] - gt_joint_positions[:, joint_corr[1]])**2).sum(dim=-1).mean()
        
        # hand position loss: the predicted hand is the elbow pose pushed along
        # position_hand_to_elbow in the elbow frame; targets are SMPLX joints 20/21
        hand_global_position_loss = 0
        pred_left_hand_positions = torch.matmul(position_hand_to_elbow, pred_link_to_world_dict["left_elbow_link"][:, :3, :3].permute(0, 2, 1)) + pred_link_to_world_dict["left_elbow_link"][:, :3, 3]
        hand_global_position_loss += ((pred_left_hand_positions - gt_joint_positions[:, 20])**2).sum(dim=-1).mean()
        pred_right_hand_positions = torch.matmul(position_hand_to_elbow, pred_link_to_world_dict["right_elbow_link"][:, :3, :3].permute(0, 2, 1)) + pred_link_to_world_dict["right_elbow_link"][:, :3, 3]
        hand_global_position_loss += ((pred_right_hand_positions - gt_joint_positions[:, 21])**2).sum(dim=-1).mean()

        # smoothness terms via finite differences over frames
        pred_joint_angles = h1_motion["joint_angles"]
        pred_joint_velocities = pred_joint_angles[1:] - pred_joint_angles[:-1]
        pred_joint_accelerations = pred_joint_velocities[1:] - pred_joint_velocities[:-1]
        pred_root_linear_velocities = pred_link_to_world_dict["pelvis"][1:, :3, 3] - pred_link_to_world_dict["pelvis"][:-1, :3, 3]
        pred_root_linear_acceleration = pred_root_linear_velocities[1:] - pred_root_linear_velocities[:-1]
        # NOTE(review): only the joint acceleration term enters the total loss below;
        # the velocity and root-acceleration terms are computed but unused
        joint_local_velocity_loss = pred_joint_velocities.abs().sum(dim=-1).mean()
        joint_local_acceleration_loss = pred_joint_accelerations.abs().sum(dim=-1).mean()
        root_global_linear_acceleration_loss = (pred_root_linear_acceleration**2).sum(dim=-1).mean()

        # joint global rotation loss
        # TODO: add a constraint on the wrist orientation
        joint_global_orientation_loss = 0
        for joint_corr in joint_orientation_corrs:
            pred_R = pred_link_to_world_dict[link_names[joint_corr[0]]][:, :3, :3]  # (N, 3, 3)
            gt_R = gt_joint_orientations[:, joint_corr[1]]  # (N, 3, 3)
            joint_global_orientation_loss += (pred_R - gt_R).abs().sum(dim=-1).sum(dim=-1).mean()

        # TODO: add contact loss

        # orientation loss is currently disabled (weight 0.0)
        loss = 1.0 * joint_global_position_loss + 5.0 * hand_global_position_loss + 1.0 * joint_local_acceleration_loss + 0.0 * joint_global_orientation_loss

        if epoch % 100 == 0:
            print(epoch, loss.item(), joint_global_position_loss.item(), hand_global_position_loss.item(), joint_local_acceleration_loss.item(), joint_global_orientation_loss.item())

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    
    ########################## finish optimization ################################

    h1_motion = h1_motion_model()
    h1_motion = complete_h1_motion(h1_motion, chain, link_names, device=device)
    return h1_motion


def copy_rotation_smplx_to_h1(chain, link_names, smplx_motion, device="cuda:0"):
    """
    Retarget an SMPLX human motion to the H1 robot by analytically copying
    relative joint rotations (no optimization).

    chain: h1 kinematic chain
    link_names: h1 link names
    smplx_motion: {"joint_positions": (N, 127, 3),"joint_global_rotations": (N, 22, 3, 3)}, all items are numpy

    return: {"joint_angles": (N, 19), "global_rotations": (N, 3) axis-angle,
             "global_translations": (N, 3),
             "link_global_poses": {link name: (N, 4, 4) numpy}}
    """

    N_frame, gt_joint_positions, gt_joint_orientations = preprocess_gt_info_smplx(smplx_motion, device=device)

    ########################## start copy rotation #################################
    # root pose: copy the SMPLX pelvis (joint 0) position and orientation
    global_translations = gt_joint_positions[:, 0].clone()  # (N, 3)
    global_rotations = []
    for i in range(N_frame):
        # rotation matrix -> axis-angle vector per frame
        axis, angle = mat2axangle(gt_joint_orientations[i, 0].detach().cpu().numpy(), unit_thresh=1e-3)
        global_rotations.append(axis * angle)
    global_rotations = torch.from_numpy(np.float32(global_rotations)).to(device)  # (N, 3)

    joint_angles = torch.zeros(N_frame, 19).to(dtype=torch.float32).to(device)
    # thighs: hip rotation relative to the pelvis, decomposed as 'yxz' Euler
    # angles then reordered (z, x, y) into the three H1 hip joint angles
    R_left_thigh_to_pelvis = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 0].permute(0, 2, 1), gt_joint_orientations[:, 1])
    R_right_thigh_to_pelvis = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 0].permute(0, 2, 1), gt_joint_orientations[:, 2])
    euler = torch.from_numpy(sRot.from_matrix(R_left_thigh_to_pelvis.detach().cpu().numpy()).as_euler('yxz'))[:, [2,1,0]]
    joint_angles[:, 0:3] = euler
    euler = torch.from_numpy(sRot.from_matrix(R_right_thigh_to_pelvis.detach().cpu().numpy()).as_euler('yxz'))[:, [2,1,0]]
    joint_angles[:, 5:8] = euler
    # knees: single-dof joints, keep only the first ('y') Euler angle
    R_left_knee_to_thigh = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 1].permute(0, 2, 1), gt_joint_orientations[:, 4])
    R_right_knee_to_thigh = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 2].permute(0, 2, 1), gt_joint_orientations[:, 5])
    euler = torch.from_numpy(sRot.from_matrix(R_left_knee_to_thigh.detach().cpu().numpy()).as_euler('yzx'))[:, 0]
    joint_angles[:, 3] = euler
    euler = torch.from_numpy(sRot.from_matrix(R_right_knee_to_thigh.detach().cpu().numpy()).as_euler('yzx'))[:, 0]
    joint_angles[:, 8] = euler
    # ankles: single-dof joints, same treatment as the knees
    R_left_ankle_to_knee = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 4].permute(0, 2, 1), gt_joint_orientations[:, 7])
    R_right_ankle_to_knee = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 5].permute(0, 2, 1), gt_joint_orientations[:, 8])
    euler = torch.from_numpy(sRot.from_matrix(R_left_ankle_to_knee.detach().cpu().numpy()).as_euler('yzx'))[:, 0]
    joint_angles[:, 4] = euler
    euler = torch.from_numpy(sRot.from_matrix(R_right_ankle_to_knee.detach().cpu().numpy()).as_euler('yzx'))[:, 0]
    joint_angles[:, 9] = euler
    # torso: single-dof yaw, take the first ('z') Euler angle
    R_torso_to_pelvis = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 0].permute(0, 2, 1), gt_joint_orientations[:, 9])
    euler = torch.from_numpy(sRot.from_matrix(R_torso_to_pelvis.detach().cpu().numpy()).as_euler('zxy'))[:, 0]
    joint_angles[:, 10] = euler
    # shoulders and elbows
    # the shoulder-pitch axis is the y axis tilted by 25 deg about x (opposite
    # sign on the right arm) -- presumably matching the H1 URDF; confirm
    theta = np.pi / 180 * 25
    left_shoulder_pitch_rot = sRot.from_euler('x', theta)
    left_shoulder_pitch_axis = left_shoulder_pitch_rot.apply([0,1,0])
    right_shoulder_pitch_rot = sRot.from_euler('x', -theta)
    right_shoulder_pitch_axis = right_shoulder_pitch_rot.apply([0,1,0])
    # rot_decompose(R, axis) appears to split R into (angle about axis, residual
    # rotation) -- see utils.rotation; angles are per-frame torch tensors
    R_left_shoulder_to_torso = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 9].permute(0, 2, 1), gt_joint_orientations[:, 16]).detach().cpu().numpy()
    Ryp_theta, Rxz = rot_decompose(R_left_shoulder_to_torso, left_shoulder_pitch_axis)
    Rx_theta, Rz = rot_decompose(Rxz, np.float64([1,0,0]))
    Rz_theta, _ = rot_decompose(Rz, np.float64([0,0,1]))
    # the +/- pi/2 offsets align with the H1 zero pose -- TODO confirm
    euler = torch.stack([Ryp_theta, Rx_theta + np.pi / 2, Rz_theta], dim=-1)
    joint_angles[:, 11:14] = euler
    R_right_shoulder_to_torso = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 9].permute(0, 2, 1), gt_joint_orientations[:, 17]).detach().cpu().numpy()
    Ryp_theta, Rxz = rot_decompose(R_right_shoulder_to_torso, right_shoulder_pitch_axis)
    Rx_theta, Rz = rot_decompose(Rxz, np.float64([1,0,0]))
    Rz_theta, _ = rot_decompose(Rz, np.float64([0,0,1]))
    euler = torch.stack([Ryp_theta, Rx_theta - np.pi / 2, Rz_theta], dim=-1)
    joint_angles[:, 15:18] = euler
    # elbows: decompose about the elbow axis; the residual about x is folded
    # back into the shoulder-roll joint as compensation
    R_left_elbow_to_shoulder = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 16].permute(0, 2, 1), gt_joint_orientations[:, 18]).detach().cpu().numpy()
    left_elbow_angle, left_elbow_R2 = rot_decompose(R_left_elbow_to_shoulder, np.float64([0,0,1]))
    left_shoulder_conpen, _ = rot_decompose(left_elbow_R2, np.float64([1,0,0]))
    joint_angles[:, 13] += left_shoulder_conpen.to(device)
    joint_angles[:, 14] = left_elbow_angle + np.pi/2
    R_right_elbow_to_shoulder = torch.einsum('bij,bjk->bik', gt_joint_orientations[:, 17].permute(0, 2, 1), gt_joint_orientations[:, 19]).detach().cpu().numpy()
    right_elbow_angle, right_elbow_R2 = rot_decompose(R_right_elbow_to_shoulder, np.float64([0,0,-1]))
    right_shoulder_conpen, _ = rot_decompose(right_elbow_R2, np.float64([1,0,0]))
    joint_angles[:, 17] += right_shoulder_conpen.to(device)
    joint_angles[:, 18] = right_elbow_angle + np.pi/2
    ########################## finish copy rotation ################################

    h1_motion = {
        "joint_angles": joint_angles,
        "global_rotations": global_rotations,
        "global_translations": global_translations,
    }
    h1_motion = complete_h1_motion(h1_motion, chain, link_names, device=device)
    return h1_motion


def retarget_smplx_to_h1(chain, link_names, smplx_motion, object_mesh, object_motion, retargeting_method="optimization", device="cuda:0"):
    """
    Dispatch SMPLX -> H1 retargeting to the implementation selected by
    *retargeting_method*: "optimization", "omnih2o" (optimization with bone
    lengths rescaled to H1), or "copy_rotation" (analytic rotation transfer).
    Raises NotImplementedError for any other method name.
    """
    if retargeting_method == "copy_rotation":
        # the analytic path does not use the object information
        return copy_rotation_smplx_to_h1(chain, link_names, smplx_motion, device=device)
    if retargeting_method == "optimization":
        method_name = "normal_optimization"
    elif retargeting_method == "omnih2o":
        method_name = "omnih2o"
    else:
        raise NotImplementedError
    return optimization_smplx_to_h1(chain, link_names, smplx_motion, object_mesh, object_motion, method_name=method_name, device=device)


if __name__ == "__main__":

    ##############################################################################################################################
    # configuration
    sequence_root = "/home/liuyun/Humanoid_IL_Benchmark/Kinematic_Data/CORE4D_singleperson_liftbox_data"
    save_root = "./CORE4D_lift_box_copyrot"

    retargeting_method = "copy_rotation"  # optimization / copy_rotation / omnih2o
    # per-augmentation (x, y, z) parameters passed to create_and_save_urdf/mesh;
    # presumably object placement or size offsets -- confirm against CORE4D_utils.urdf_and_mesh
    object_augmentation_strategies = [
        [-0.10, 0.20, 0.0],
        [-0.05, 0.20, 0.0],
        [-0.15, 0.20, 0.0],
        [-0.10, 0.30, 0.0],
        [-0.10, 0.10, 0.0],
        [-0.10, 0.20, 0.10],
        [-0.10, 0.20, -0.10],
        [-0.05, 0.10, -0.10],
        [-0.15, 0.30, -0.10],
        [-0.15, 0.10, 0.10],
    ]

    smplx_model_dir = "./SAMP_utils/models"
    device = "cuda:0"
    ##############################################################################################################################

    chain = load_urdf("/home/liuyun/Humanoid_IL_Benchmark/retargeting/assets/h1_description/urdf/h1.urdf", device=device)
    link_names = get_h1_link_names()

    # load gt data: directory layout is sequence_root/<object_name>/<seq_name>
    object_names = os.listdir(sequence_root)
    object_names.sort()
    sequence_infos = []
    for object_name in object_names:
        for seq_name in os.listdir(join(sequence_root, object_name)):
            sequence_infos.append([object_name, seq_name])

    for sequence_info in sequence_infos:
        obj_name, seq_name = sequence_info
        sequence_dir = join(sequence_root, obj_name, seq_name)
        human_motion, object_mesh, object_motion = load_HOI_data_aligned_by_objpose(sequence_dir, "person", smplx_model_dir, start_frame=0, end_frame=-1, sampling_rate=1)
        h1_motion = retarget_smplx_to_h1(chain, link_names, human_motion, object_mesh, object_motion, retargeting_method=retargeting_method, device=device)
        
        # save retargeted data, one copy per augmentation strategy
        # NOTE(review): the same h1_motion/object_motion are saved under every
        # augmented object; only the generated scene URDF/mesh differ
        obj_model_verts = object_mesh.vertices
        obj_model_bbox = [obj_model_verts.min(axis=0), obj_model_verts.max(axis=0)]
        for idx, obj_aug_strategy in enumerate(object_augmentation_strategies):
            aug_obj_name = obj_name + "_" + str(idx).zfill(3)
            save_dir = join(save_root, aug_obj_name)
            os.makedirs(join(save_dir, "h1_kinematic_motions"), exist_ok=True)
            create_and_save_urdf(obj_model_bbox, np.float32(obj_aug_strategy), join(save_dir, "scene_mesh.urdf"))
            create_and_save_mesh(obj_model_bbox, np.float32(obj_aug_strategy), join(save_dir, "scene_mesh.obj"))
            save_predicted_h1_motion_with_object_info(h1_motion, object_mesh, object_motion, join(save_dir, "h1_kinematic_motions", seq_name + "_data.npz"))