import os
from os.path import join
import numpy as np
import torch
from CORE4D_utils.load_data import load_HOI_data_aligned_by_objposition
import trimesh
from transforms3d.axangles import mat2axangle
from scipy.spatial.transform  import Rotation as sRot
from h1_kinematics import H1_Motion_Model, load_urdf, get_h1_link_names, forward_kinematics, save_predicted_h1_motion
from utils.rotation import rot_decompose
import open3d as o3d


def point_distances(A, B):
    """
    Compute all pairwise Euclidean distances between two point sets.

    A: np.float32, shape = (N, 2/3), unit: m
    B: np.float32, shape = (M, 2/3), unit: m
    return np.float32, shape = (N, M), unit: m
    """
    # ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b, evaluated with broadcasting
    # instead of materializing repeated (N, M) copies of the squared norms.
    aa = (A ** 2).sum(axis=1)[:, None]   # (N, 1)
    bb = (B ** 2).sum(axis=1)[None, :]   # (1, M)
    sq = aa + bb - 2 * (A @ B.T)         # (N, M) squared distances
    # clip tiny negatives caused by floating-point cancellation before sqrt
    return sq.clip(0, None) ** 0.5


def evaluation_touch_point(data, kinematic_data_hand_to_object_distance_threshold=0.15, retargeted_hand_to_kinematic_target_distance_threshold=0.075):
    """
    Evaluate the "touch point" task on kinematic targets and retargeted H1 motions.

    data: list of dicts, each with
        "motion_data": dict containing
            "task_target_hand_positions": (2, 3) left/right target hand positions
            "link_global_poses": dict of per-frame (4, 4) link poses; only the
                final frame of "left_elbow_link" / "right_elbow_link" is used
        "scene_pts": (P, 3) np.float32 points sampled on the scene/object surface
    kinematic_data_hand_to_object_distance_threshold: float, unit m — a kinematic
        target counts as a success if EITHER hand target lies within this distance
        of the object surface.
    retargeted_hand_to_kinematic_target_distance_threshold: float, unit m — a
        retargeted motion counts as a success only if BOTH final hand positions
        lie within this distance of their kinematic targets.

    Returns a dict of np.float32 arrays (one value per motion) and prints the
    mean/std of every metric.
    """
    results = {
        "kinematic_data_left_hand_to_object_distance": [],  # float
        "kinematic_data_right_hand_to_object_distance": [],  # float
        "kinematic_data_task_success": [],  # True / False
        "final_left_hand_to_target_hand_distance": [],  # float
        "final_right_hand_to_target_hand_distance": [],  # float
        "retargeted_data_task_success": [],  # True / False
    }
    # hand-position offset expressed in the elbow link frame; loop-invariant,
    # so build it once instead of once per motion
    position_hand_to_elbow = np.float32([0.30, 0, 0.0])
    for one_data in data:
        motion_data, scene_pts = one_data["motion_data"], one_data["scene_pts"]

        # --- kinematic targets vs. object surface ---
        kinematic_data_hand_positions = motion_data["task_target_hand_positions"]  # (2, 3)
        hand_to_object = point_distances(kinematic_data_hand_positions, scene_pts)  # (2, P)
        left_min, right_min = hand_to_object[0].min(), hand_to_object[1].min()
        results["kinematic_data_left_hand_to_object_distance"].append(left_min)
        results["kinematic_data_right_hand_to_object_distance"].append(right_min)
        # kinematic success: either hand target touches the object
        results["kinematic_data_task_success"].append(
            (left_min < kinematic_data_hand_to_object_distance_threshold)
            or (right_min < kinematic_data_hand_to_object_distance_threshold)
        )

        # --- final retargeted hand positions vs. kinematic targets ---
        final_left_elbow_pose = motion_data["link_global_poses"]["left_elbow_link"][-1]
        final_right_elbow_pose = motion_data["link_global_poses"]["right_elbow_link"][-1]
        # hand position = R_elbow @ offset + t_elbow
        final_left_hand_position = final_left_elbow_pose[:3, :3] @ position_hand_to_elbow + final_left_elbow_pose[:3, 3]
        final_right_hand_position = final_right_elbow_pose[:3, :3] @ position_hand_to_elbow + final_right_elbow_pose[:3, 3]
        final_left_hand_to_target_hand_distance = np.linalg.norm(final_left_hand_position - kinematic_data_hand_positions[0])
        final_right_hand_to_target_hand_distance = np.linalg.norm(final_right_hand_position - kinematic_data_hand_positions[1])
        results["final_left_hand_to_target_hand_distance"].append(final_left_hand_to_target_hand_distance)
        results["final_right_hand_to_target_hand_distance"].append(final_right_hand_to_target_hand_distance)
        # retargeted success: BOTH hands must reach their targets
        results["retargeted_data_task_success"].append(
            (final_left_hand_to_target_hand_distance < retargeted_hand_to_kinematic_target_distance_threshold)
            and (final_right_hand_to_target_hand_distance < retargeted_hand_to_kinematic_target_distance_threshold)
        )

    print("###### evaluation results: ######")
    for key in results:
        results[key] = np.float32(results[key])  # bool lists become 0.0/1.0, so mean = success rate
        print("[{}] {} (+-{})".format(key, np.mean(results[key]), np.std(results[key])))
    print("#################################")
    return results


def evaluation(data, task=None):
    """
    Dispatch evaluation to the routine matching *task*.

    data: list of per-motion dicts (see evaluation_touch_point)
    task: currently only "touch" is supported
    Raises NotImplementedError for any other task.
    """
    if task != "touch":
        raise NotImplementedError
    return evaluation_touch_point(data)


if __name__ == "__main__":

    ###########################################################################
    retargeted_data_dir = "/home/liuyun/Humanoid_IL_Benchmark/retargeting/CORE4D_touch_point"
    task = "touch"
    ###########################################################################

    # collect scene directory names; for the "touch" task the scenes live one
    # level below per-object folders, so expand "<obj>/<scene>" paths
    scene_names = sorted(os.listdir(retargeted_data_dir))
    if task == "touch":
        expanded = []
        for obj_name in scene_names:
            for sub_name in sorted(os.listdir(join(retargeted_data_dir, obj_name))):
                expanded.append(join(obj_name, sub_name))
        scene_names = expanded

    # load every retargeted motion together with its scene's sampled point cloud
    data = []
    for scene_name in scene_names:
        scene_dir = join(retargeted_data_dir, scene_name)
        mesh = o3d.io.read_triangle_mesh(join(scene_dir, "scene_mesh.obj"))
        pcd = mesh.sample_points_uniformly(number_of_points=10000)
        scene_pts = np.float32(pcd.points)  # (10000, 3)
        motion_dir = join(scene_dir, "h1_kinematic_motions")
        for motion_name in sorted(os.listdir(motion_dir)):
            motion_data = np.load(join(motion_dir, motion_name), allow_pickle=True)["arr_0"].item()
            data.append({"motion_data": motion_data, "scene_pts": scene_pts})
    print("retargeted motion number:", len(data))

    result = evaluation(data, task=task)