import argparse
import os
import numpy as np
import zarr
import pickle
from termcolor import cprint
import json
from termcolor import colored
from scipy.spatial.transform import Rotation as R

import fpsample
from sklearn.cluster import DBSCAN
from time import time
import pcd_visualizer
from dataclasses import dataclass, field
from typing import List
try:
    from realsense_camera import RealSense_Camera
except ImportError:
    from .realsense_camera import RealSense_Camera


################################# Camera Calibration ##############################################
# refer to https://gist.github.com/hshi74/edabc1e9bed6ea988a2abd1308e1cc96

try:
    from franka_home_real import ARM_HOME, HAND_HOME
except ImportError:
    from .franka_home_real import ARM_HOME, HAND_HOME
class Robot2CamParams:
    """Robot-to-camera extrinsic calibration with interactively tunable offsets.

    Loads an initial translation + quaternion from a JSON config file, then
    maintains additive position and RPY (roll/pitch/yaw) offsets on top of it.
    The effective pose is exposed via ``get_pos()`` / ``get_quat()`` and can be
    persisted back to JSON with ``save_calibration()``.
    """

    def __init__(self, config_path="/home/dwl/DemonGen_dev/real_world/cam.json"):
        """Load initial extrinsics from *config_path* and zero both offsets."""
        with open(config_path, 'r') as f:
            config = json.load(f)
        pos = np.array([config["translation"]["x"], config["translation"]["y"], config["translation"]["z"]])
        self.ROBOT2CAM_POS = pos
        # Pristine copies of the loaded calibration, so offsets can be
        # re-applied and reset without re-reading the file.
        self.pos_initial = self.ROBOT2CAM_POS.copy()
        quat_initial = np.array([config["rotation"]["x"], config["rotation"]["y"], config["rotation"]["z"], config["rotation"]["w"]])
        self.quat_initial = quat_initial
        self.offset_rpy = np.array([0.0, 0.0, 0.0])
        self.offset_pos = np.array([0.0, 0.0, 0.0])
        self.update_orientation()
        self.update_position()

    def update_orientation(self):
        """Recompute ROBOT2CAM_QUAT = initial rotation composed with the RPY offset."""
        ori = R.from_quat(self.quat_initial)
        offset_rot = R.from_euler('xyz', self.offset_rpy, degrees=False)
        # Right-multiplication: the offset is applied in the rotated (camera) frame.
        ori = ori * offset_rot
        self.ROBOT2CAM_QUAT = ori.as_quat()

    def update_position(self):
        """Recompute ROBOT2CAM_POS = initial translation + position offset."""
        self.ROBOT2CAM_POS = self.pos_initial + self.offset_pos

    def add_offset_rpy(self, delta_rpy, print_info=True):
        """Accumulate *delta_rpy* (radians, 'xyz' order) into the rotation offset.

        Returns the updated offset array (the live array, not a copy).
        """
        self.offset_rpy += delta_rpy
        self.update_orientation()
        if print_info:
            current_offset = self.offset_rpy.copy()
            print(colored(f"RPY offset applied. {delta_rpy}", "green"), colored(f"Current offset: {current_offset} absolute{self.offset_rpy}", "green"))
        return self.offset_rpy

    def add_offset_pos(self, delta_pos, print_info=True):
        """Accumulate *delta_pos* (meters, xyz) into the position offset.

        Returns the updated offset array (the live array, not a copy).
        """
        self.offset_pos += delta_pos
        self.update_position()
        if print_info:
            current_offset = self.offset_pos.copy()
            print(colored(f"Position offset applied.{delta_pos}", "green"), colored(f"Current offset: {current_offset} absolute{self.offset_pos}", "green"))
        return self.offset_pos

    def reset_offset(self):
        """Zero both offsets and restore the initially-loaded pose.

        Returns the (offset_rpy, offset_pos) arrays after the reset.
        """
        self.offset_rpy = np.array([0.0, 0.0, 0.0])
        self.offset_pos = np.array([0.0, 0.0, 0.0])
        self.update_orientation()
        self.update_position()
        return self.offset_rpy, self.offset_pos

    def get_offset_rpy(self):
        """Return a copy of the current RPY offset (radians)."""
        return self.offset_rpy.copy()

    def get_offset_pos(self):
        """Return a copy of the current position offset (meters)."""
        return self.offset_pos.copy()

    def get_pos(self):
        """Return the effective robot->camera translation (initial + offset)."""
        return self.ROBOT2CAM_POS

    def get_quat(self):
        """Return the effective robot->camera quaternion (xyzw, offset applied)."""
        return self.ROBOT2CAM_QUAT

    def save_calibration(self, config_path="/home/dwl/DemonGen_dev/real_world/cam.json"):
        """Write the effective (offset-applied) calibration back to *config_path* as JSON."""
        final_pos = self.pos_initial + self.offset_pos
        final_quat = self.ROBOT2CAM_QUAT
        calibration_data = {"translation": {"x": float(final_pos[0]), "y": float(final_pos[1]), "z": float(final_pos[2])}, "rotation": {"x": float(final_quat[0]), "y": float(final_quat[1]), "z": float(final_quat[2]), "w": float(final_quat[3])}}
        with open(config_path, 'w') as f:
            json.dump(calibration_data, f, indent=2)
        print(f"Calibration saved to {config_path}")
        print(f"Final translation: {final_pos}")
        print(f"Final rotation (quaternion): {final_quat}")

    def get_calibration_summary(self):
        """Print and return a dict summarizing initial vs. current calibration.

        The returned dict contains plain lists (JSON-serializable), with keys
        'initial_translation', 'initial_rotation', 'current_offsets', and
        'final_calibration'.
        """
        final_pos = self.pos_initial + self.offset_pos
        final_quat = self.ROBOT2CAM_QUAT
        summary = {"initial_translation": self.pos_initial.tolist(), "initial_rotation": self.quat_initial.tolist(), "current_offsets": {"position": self.offset_pos.tolist(), "rotation_rpy": self.offset_rpy.tolist()}, "final_calibration": {"translation": final_pos.tolist(), "rotation_quaternion": final_quat.tolist()}}
        print("Calibration Summary:")
        print(f"  Initial translation: {summary['initial_translation']}")
        print(f"  Initial rotation: {summary['initial_rotation']}")
        print(f"  Final translation: {summary['final_calibration']['translation']}")
        print(f"  Final rotation: {summary['final_calibration']['rotation_quaternion']}")
        initial_pos = np.array(summary['initial_translation'])
        final_pos = np.array(summary['final_calibration']['translation'])
        initial_rot = np.array(summary['initial_rotation'])
        final_rot = np.array(summary['final_calibration']['rotation_quaternion'])
        pos_diff = final_pos - initial_pos
        # NOTE: element-wise quaternion difference, not an angular distance —
        # useful only as a rough change indicator.
        rot_diff = final_rot - initial_rot
        print(f"  Position diff: {pos_diff}")
        print(f"  Rotation diff: {rot_diff}")
        return summary

# Global instance
# NOTE(review): constructed at import time with the default config path, so
# importing this module reads cam.json from disk — confirm this side effect
# is intended.
robot2cam_params = Robot2CamParams()

# Scale applied to raw point coordinates in pcd_crop(); presumably the
# RealSense depth-unit-to-meter factor (~1e-3) — TODO confirm against the
# camera's reported depth scale.
REALSENSE_SCALE = 0.0010000000474974513

# ROBOT2CAM = np.eye(4)
# ROBOT2CAM[:3, :3] = R.from_quat(ROBOT2CAM_QUAT).as_matrix()
# ROBOT2CAM[:3, 3]  = ROBOT2CAM_POS
###################################################################################################


################################# Hyperparameters for pcd_process ##################################
@dataclass
class PCDProcConfig:
    """Hyperparameters for the point-cloud preprocessing pipeline (crop + cluster)."""

    random_drop_points: int        # number of points kept by the random subsampling step in pcd_cluster()
    outlier_distance: float        # DBSCAN eps (neighborhood radius) used when clustering in pcd_cluster()
    outlier_count: int             # clusters with fewer members than this are discarded as outliers
    n_points: int                  # final number of points returned after FPS sampling
    work_space: List[List[float]]  # [[x_min, x_max], [y_min, y_max], [z_min, z_max]] crop box applied after the robot-frame transform

# Default configuration shared by preprocess_point_cloud / pcd_crop / pcd_cluster.
pcd_config = PCDProcConfig(
    random_drop_points=5000,
    outlier_distance=0.015,
    outlier_count=50,
    n_points=1024,
    work_space=[
        [0.2, 0.8],
        [-0.66, 0.6],
        [-0.00, 0.45]
    ])
###################################################################################################


def preprocess_point_cloud(points, cfg=pcd_config, debug=False):
    """Run the full preprocessing pipeline: workspace crop, then cluster filter + FPS sampling."""
    cropped = pcd_crop(points, cfg, debug)
    return pcd_cluster(cropped, cfg, debug)

def pcd_crop(points, cfg=pcd_config, debug=False):
    """Transform the cloud into the robot frame and crop it to cfg.work_space.

    The xyz columns of *points* are overwritten in place with the transformed
    coordinates; the returned array is a cropped view/copy of the input.
    """
    workspace = cfg.work_space

    # Build the 4x4 robot<-camera extrinsic from the live calibration globals.
    extrinsic = np.eye(4)
    extrinsic[:3, :3] = R.from_quat(robot2cam_params.get_quat()).as_matrix()
    extrinsic[:3, 3] = robot2cam_params.get_pos()

    # Depth units -> meters, then homogeneous transform into the robot frame.
    xyz_cam = points[..., :3] * REALSENSE_SCALE
    homogeneous = np.hstack((xyz_cam, np.ones((xyz_cam.shape[0], 1))))
    homogeneous = (extrinsic @ np.ascontiguousarray(homogeneous.T)).T

    # In-place write-back: callers (e.g. the __main__ script) rely on the
    # input array carrying the transformed coordinates afterwards.
    points[..., :3] = homogeneous[..., :-1]

    if debug:
        pcd_visualizer.visualize_pointcloud(points)

    # Keep only points inside the axis-aligned workspace box.
    inside = ((points[..., 0] > workspace[0][0]) & (points[..., 0] < workspace[0][1]) &
              (points[..., 1] > workspace[1][0]) & (points[..., 1] < workspace[1][1]) &
              (points[..., 2] > workspace[2][0]) & (points[..., 2] < workspace[2][1]))
    points = points[inside]

    if debug:
        pcd_visualizer.visualize_pointcloud(points)

    return points


def pcd_cluster(points, cfg=pcd_config, debug=False):
    """Denoise the cloud via DBSCAN cluster filtering, then FPS-sample it.

    Steps:
      1. Randomly subsample to at most cfg.random_drop_points points.
      2. DBSCAN-cluster the xyz coordinates; drop noise (label -1) and any
         cluster smaller than cfg.outlier_count members.
      3. Farthest-point-sample cfg.n_points of the survivors.

    Returns the filtered, sampled point array (extra channels preserved).
    """
    RANDOM_DROP_POINTS = cfg.random_drop_points
    OUTLIER_DISTANCE = cfg.outlier_distance
    OUTLIER_COUNT = cfg.outlier_count
    N_POINTS = cfg.n_points

    # Randomly subsample. Guard: np.random.choice(..., replace=False) raises
    # ValueError when asked for more samples than there are points, so small
    # clouds are passed through unchanged.
    if points.shape[0] > RANDOM_DROP_POINTS:
        points = points[np.random.choice(points.shape[0], RANDOM_DROP_POINTS, replace=False)]
    points_xyz = points[..., :3]

    print(points_xyz.shape)

    # DBSCAN clustering on xyz only (extra channels, e.g. color, are ignored).
    dbscan = DBSCAN(eps=OUTLIER_DISTANCE, min_samples=10)
    labels = dbscan.fit_predict(points_xyz)

    # Discard clusters with fewer than OUTLIER_COUNT members, and always
    # discard DBSCAN noise (label -1) even if it is large.
    unique_labels, counts = np.unique(labels, return_counts=True)
    outlier_labels = unique_labels[counts < OUTLIER_COUNT]
    if -1 not in outlier_labels:
        outlier_labels = np.append(outlier_labels, -1)

    points = points[~np.isin(labels, outlier_labels)]
    points_xyz = points[..., :3]

    print(points_xyz.shape)

    if debug:
        pcd_visualizer.visualize_pointcloud(points)

    # Farthest point sampling down to the fixed model input size.
    sample_indices = fpsample.bucket_fps_kdline_sampling(points_xyz, N_POINTS, h=3)
    points = points[sample_indices]

    print(points.shape)
    if debug:
        pcd_visualizer.visualize_pointcloud(points)

    return points


if __name__ == "__main__":
    # Capture one frame from the L515 RealSense camera and run the
    # preprocessing pipeline on it.
    camera_id = "f0211830"  # renamed from `id` to avoid shadowing the builtin
    realsense_camera = RealSense_Camera(type="L515", id=camera_id)
    realsense_camera.prepare()
    point_cloud, rgbd_frame = realsense_camera.get_frame()

    # The pipeline's return value is discarded here: pcd_crop mutates
    # point_cloud's xyz in place (camera -> robot frame), so the array saved
    # below is the transformed (but uncropped/unsampled) cloud.
    preprocess_point_cloud(points=point_cloud, debug=False)

    # Ensure the output directory exists before saving (np.save does not create it).
    os.makedirs("./data", exist_ok=True)
    np.save("./data/moon.npy", point_cloud)