import os

# Set environment variables BEFORE importing mujoco
os.environ["MUJOCO_GL"] = "osmesa"

import matplotlib.animation as animation
import matplotlib.pyplot as plt
import mujoco
import numpy as np
import pinocchio as pin
from gymnasium import spaces
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from scipy.spatial.transform import Rotation
from tqdm import trange

from sim.ik import Arm_IK


class LinePath:
    """Samples collision-free straight-line (joint-space) trajectories for a 6-DOF arm."""

    def __init__(self):
        # The joint sampling space (radians). Joints 3, 5 are pinned to 0 by
        # identical low/high bounds.
        max_joint_rad = np.array([1.57, 1.57, 0, 0, 1.22, 0])
        min_joint_rad = np.array([-1.57, 0, -1.57, 0, 0, 0])
        self.joint_space = spaces.Box(low=min_joint_rad, high=max_joint_rad, shape=(6,))

        # The quaternion sampling space, around the "vertical down" orientation.
        # np.array([0, -1, 0, 0]) is vertical down
        # example: [0.08522, -0.95511, -0.03461, -0.28161]
        max_vec_rad = np.array([0.1, -0.9, 0.1, 0.3])
        min_vec_rad = np.array([0.0, -1, -0.1, -0.3])
        self.quat_space = spaces.Box(low=min_vec_rad, high=max_vec_rad, shape=(4,))

        # The end-effector position sampling space (meters).
        max_pos_m = np.array([1, 0.5, 1])
        min_pos_m = np.array([0, -0.5, -0.1])
        self.pos_space = spaces.Box(low=min_pos_m, high=max_pos_m, shape=(3,))

        self.ik = Arm_IK()
        # Waypoint spacing in meters; the smaller, the higher the control Hz.
        # NOTE: attribute name kept ("constant" misspelled) for backward compatibility.
        self.speed_costant = 0.01

    def sample_trajectories(self, num_trajectories: int = 1):
        """Sample `num_trajectories` collision-free linear paths.

        Returns:
            dict with:
              "x_trajectories": (num, 2, 7) array; per trajectory, the
                  [start, end] end-effector poses as XYZQUAT.
              "j_trajectories": list of (T_i, 6) joint waypoint arrays. Paths
                  have differing lengths, so they cannot share one rectangular
                  array and are returned as a list.
        """
        x_trajectories = []
        j_trajectories = []

        for _ in range(num_trajectories):
            # BUG FIX: sample_path() returns a dict, not an (x_pos, j_pos)
            # tuple — the old tuple-unpacking raised ValueError.
            path = self.sample_path()
            x_trajectories.append(np.stack([path["start_pos"], path["end_pos"]]))
            j_trajectories.append(path["j_pos"])
        return {"x_trajectories": np.stack(x_trajectories), "j_trajectories": j_trajectories}

    def sample_non_collision_joint(self):
        """Rejection-sample a joint configuration that is above the ground and self-collision free."""
        while True:
            joint = self.joint_space.sample()

            xyz = self.ik.forwardKinematics(joint).translation
            if xyz[-1] < 0.01:
                # collision with the ground
                continue

            if not self.ik.check_self_collision(joint):
                return joint

    def sample_path(self):
        """Sample one linear joint-space path between two collision-free configurations.

        The end point is re-sampled until the Cartesian start/end distance is
        large enough for at least one intermediate waypoint.

        Returns:
            dict with "j_pos" ((T, 6) linearly-interpolated joint waypoints),
            "start_pos" and "end_pos" (XYZQUAT poses of the endpoints).
        """
        start_joint = self.sample_non_collision_joint()
        start_se3 = self.ik.forwardKinematics(start_joint)

        while True:
            end_joint = self.sample_non_collision_joint()
            end_se3 = self.ik.forwardKinematics(end_joint)

            dist = np.linalg.norm(end_se3.translation - start_se3.translation)  # Only consider xyz distance
            if dist > self.speed_costant * 10:
                # at least one middle point
                break

        # Generate intermediate points for interpolation
        t_pts = np.linspace(0, 1, int(dist / self.speed_costant) + 2)

        j_pos = np.stack([(1 - t) * start_joint + t * end_joint for t in t_pts])
        return {
            "j_pos": j_pos,
            "start_pos": pin.SE3ToXYZQUAT(start_se3),
            "end_pos": pin.SE3ToXYZQUAT(end_se3),
        }

    def _slerp(self, rpy1, rpy2, t):
        """Spherical linear interpolation between two orientations given as xyz Euler angles.

        Returns the interpolated orientation as xyz Euler angles.
        """
        q1 = Rotation.from_euler("xyz", rpy1).as_quat()
        q2 = Rotation.from_euler("xyz", rpy2).as_quat()

        # Ensure shortest path: q and -q represent the same rotation.
        if np.dot(q1, q2) < 0:
            q2 = -q2

        # Calculate angle between quaternions
        cos_theta = np.dot(q1, q2)
        cos_theta = np.clip(cos_theta, -1.0, 1.0)
        theta = np.arccos(cos_theta)

        if theta < 1e-6:
            # BUG FIX: the early exit previously returned a raw quaternion,
            # while every other path returns Euler angles.
            return Rotation.from_quat(q1).as_euler("xyz")

        # SLERP formula
        sin_theta = np.sin(theta)
        w1 = np.sin((1 - t) * theta) / sin_theta
        w2 = np.sin(t * theta) / sin_theta

        inter_quat = w1 * q1 + w2 * q2
        return Rotation.from_quat(inter_quat).as_euler("xyz")


def save_frames_as_mp4(
    joints,
    output_path="simulation_video.mp4",
    fps=30,
    resolution=(960, 540),
    dpi=300,
    model_path="piper_description/mujoco_model/piper_description.xml",
):
    """Render joint trajectories in MuJoCo and save them as an MP4 via matplotlib/ffmpeg.

    Args:
        joints: iterable of per-step joint position vectors (one video frame each).
        output_path: destination MP4 file path.
        fps: playback frame rate of the output video.
        resolution: (width, height) in pixels for the off-screen renderer.
        dpi: matplotlib figure DPI; figure size is derived as resolution / dpi.
        model_path: MuJoCo XML model to load (defaults to the Piper arm model).

    Raises:
        ValueError: if `joints` is empty — there is nothing to render.
    """
    model = mujoco.MjModel.from_xml_path(model_path)
    data = mujoco.MjData(model)

    scene_option = mujoco.MjvOption()
    camera = mujoco.MjvCamera()
    camera.distance = 1.5
    camera.azimuth = 180

    # Simulate and collect frames
    frames = []
    mujoco.mj_resetData(model, data)

    # Calculate figure size based on resolution and DPI
    width_inches = resolution[0] / dpi
    height_inches = resolution[1] / dpi

    with mujoco.Renderer(model, width=resolution[0], height=resolution[1]) as renderer:
        for j in joints:
            j = np.asarray(j)
            # Pad with zeros for the remaining DOFs (e.g. gripper joints)
            # instead of hard-coding 2, so models with a different qpos size
            # also work.
            data.qpos = np.concatenate([j, np.zeros(model.nq - j.shape[0])])
            mujoco.mj_step(model, data)
            renderer.update_scene(data, scene_option=scene_option, camera=camera)
            frames.append(renderer.render())

    if not frames:
        raise ValueError("`joints` is empty; nothing to render")

    # Create matplotlib figure with high resolution
    fig, ax = plt.subplots(figsize=(width_inches, height_inches), dpi=dpi)
    ax.set_xlim(0, frames[0].shape[1])
    ax.set_ylim(frames[0].shape[0], 0)  # Invert y-axis for image coordinates
    ax.axis("off")

    # Create image object
    img = ax.imshow(frames[0])

    def animate(frame_idx):
        # Convert frame to proper format for matplotlib
        frame = frames[frame_idx]
        if frame.dtype != np.uint8:
            frame = (frame * 255).astype(np.uint8)
        img.set_array(frame)
        return [img]

    # Create animation
    anim = animation.FuncAnimation(fig, animate, frames=len(frames), interval=1000 / fps, blit=True, repeat=False)

    # Save animation with high quality settings
    print(f"Saving high-resolution animation to {output_path}...")
    print(f"Resolution: {resolution[0]}x{resolution[1]}, DPI: {dpi}")

    # Use high-quality ffmpeg settings (CRF 18 is visually near-lossless)
    anim.save(
        output_path,
        writer="ffmpeg",
        fps=fps,
        dpi=dpi,
        extra_args=["-vcodec", "libx264", "-pix_fmt", "yuv420p", "-crf", "18"],
    )
    plt.close(fig)
    print(f"High-resolution video saved to: {output_path}")


if __name__ == "__main__":

    # Sample one collision-free linear path and visualize/render it.
    # Do not name a module-level instance `self` — that is reserved by
    # convention for bound methods and is misleading here.
    sampler = LinePath()
    path = sampler.sample_path()

    # Visualize the start configuration against the end pose (xyz + quat).
    sampler.ik.visualize(path["j_pos"][0], np.array(path["end_pos"][:3]), np.array(path["end_pos"][3:]))

    # TODO: IK solutions can be jerky/glitchy — needs smoothing.

    save_frames_as_mp4(path["j_pos"])

    # Alternative: replay recorded joints from a JSON log, e.g.
    # with open("data/json_data/can0_L20Full.json", "r") as f:
    #     joint_pts = np.deg2rad(np.array(json.load(f)["joint"]))
    #     save_frames_as_mp4(joint_pts[:500])
