from typing import Callable

import numpy as np
from dora import DoraStatus
from dora_utils import (
    get_extrinsic_matrix,
    get_intrinsic_matrix,
    get_projection_matrix,
    local_points_to_camera_view,
)
from sklearn.neighbors import KNeighborsRegressor
import cv2
import math

import pickle
import open3d

# Depth-camera image geometry (pixels) and horizontal field of view (degrees).
DEPTH_IMAGE_WIDTH = 1920
DEPTH_IMAGE_HEIGHT = 1080
DEPTH_FOV = 90
# Camera pose relative to the vehicle; first three entries are the xyz
# offset used below in Operator.on_input ("position" branch).  The last
# three are presumably rotation angles — TODO confirm the ordering
# against get_projection_matrix.
CAMERA_POSITION = np.array([2.0, 0, 1.0, 0, 0, 0])
INTRINSIC_MATRIX = get_intrinsic_matrix(
    DEPTH_IMAGE_WIDTH, DEPTH_IMAGE_HEIGHT, DEPTH_FOV
)

INV_INTRINSIC_MATRIX = np.linalg.inv(INTRINSIC_MATRIX)
# Rotation from Velodyne axes (x forward, y right, z up) to camera axes
# (x right, y down, z forward) — see the axis comment in Operator.on_input.
VELODYNE_MATRIX = np.array([[0, 0, 1], [1, 0, 0], [0, -1, 0]])
# Same idea for what appears to be the Unreal/CARLA axis convention
# (mirrored x) — NOTE(review): unused in this chunk, verify before relying on it.
UNREAL_MATRIX = np.array([[0, 0, 1], [-1, 0, 0], [0, -1, 0]])
INV_UNREAL_MATRIX = np.linalg.inv(UNREAL_MATRIX)
INV_VELODYNE_MATRIX = np.linalg.inv(VELODYNE_MATRIX)


class Predict_obstacle:
    """Per-label bank of constant-velocity Kalman filters.

    Each detected obstacle label owns a list of trackers; incoming
    (x, y, z) measurements are matched to an existing tracker by XY
    proximity (or start a new one), and a warmed-up tracker can be
    rolled forward 20 steps to produce a short-horizon trajectory.
    """

    def __init__(self):
        # label -> list of tracker dicts:
        #   {"filter": cv2.KalmanFilter, "frames": int measurements seen,
        #    "update": bool corrected-this-frame flag,
        #    "points": [[x, y, z], ...] raw measurements,
        #    "cash": [x, y, z] scratch slot (currently unused)}
        self.kf = {}
        self.dt = 0.1  # time step between measurements (s)
        self.diff_threshold = 0.5  # max XY distance (m) to match a tracker
        self.frames = 0  # total frames seen (incremented by the caller)

    def distance_compute(self, point1, point2):
        """Return True when the XY distance between the two points
        exceeds ``self.diff_threshold`` (i.e. they do NOT match)."""
        return math.hypot(point1[0] - point2[0], point1[1] - point2[1]) > self.diff_threshold

    def _new_filter(self):
        """Build a 4-state (x, y, vx, vy) / 2-measurement (x, y)
        constant-velocity Kalman filter."""
        kf = cv2.KalmanFilter(4, 2)
        kf.measurementMatrix = np.array([[1, 0, 0, 0], [0, 1, 0, 0]], np.float32)
        # Constant-velocity transition over one time step self.dt.
        kf.transitionMatrix = np.array(
            [[1, 0, self.dt, 0],
             [0, 1, 0, self.dt],
             [0, 0, 1, 0],
             [0, 0, 0, 1]], np.float32)
        kf.processNoiseCov = np.array(
            [[3, 0, 0, 0],
             [0, 5, 0, 0],
             [0, 0, 0.15, 0],
             [0, 0, 0, 0.25]], np.float32) * 1e-2
        kf.measurementNoiseCov = np.array([[2, 0], [0, 1.5]], np.float32) * 1e-4
        return kf

    def create_kf(self, label):
        """Append a fresh tracker for ``label``, creating the label
        entry on first use.

        Bug fixed: the original built second-and-later filters with a
        hard-coded dt of 1 in the transition matrix instead of
        ``self.dt``, and omitted the "cash" key on the first tracker.
        """
        entry = {"filter": self._new_filter(), "frames": 0, "update": False,
                 "points": [], "cash": [0, 0, 0]}
        self.kf.setdefault(label, []).append(entry)

    def correct(self, kf, coordX, coordY, coordZ):
        """Apply one (x, y) measurement to the tracker and record the
        raw 3D point; marks the tracker as updated this frame."""
        measured = np.array([[np.float32(coordX)], [np.float32(coordY)]])
        kf["filter"].correct(measured)
        kf["frames"] += 1
        kf["points"].append([coordX, coordY, coordZ])
        kf["update"] = True

    def update(self):
        """Drop every tracker that was not corrected this frame and
        clear the update flags on the survivors."""
        for label in self.kf:
            kept = []
            for tracker in self.kf[label]:
                if tracker["update"]:
                    tracker["update"] = False
                    kept.append(tracker)
            self.kf[label] = kept

    def predict(self, label, coordX, coordY, coordZ):
        """Feed a new (x, y, z) measurement for ``label``; once the
        matched tracker has seen more than 5 frames, roll the filter
        forward 20 steps.

        Returns a list of [x, y, z_avg, vx, vy] rows, or None while
        the tracker is still warming up.
        """
        if label not in self.kf:
            self.create_kf(label)
            kf = self.kf[label][-1]
        else:
            # Match the measurement to an existing tracker by XY
            # proximity; start a new tracker when none is close enough.
            for candidate in self.kf[label]:
                if not self.distance_compute(candidate["points"][-1], [coordX, coordY]):
                    kf = candidate
                    break
            else:
                self.create_kf(label)
                kf = self.kf[label][-1]
        self.correct(kf, coordX, coordY, coordZ)

        if kf["frames"] > 5:
            # Use the mean z of the last 3 measurements as the height
            # of every predicted point (the filter only tracks x/y).
            z_avg = sum(p[2] for p in kf["points"][-3:]) / 3
            predict_points = []
            for _ in range(20):
                predicted = kf["filter"].predict()
                predict_points.append(
                    [float(predicted[0]), float(predicted[1]), z_avg,
                     float(predicted[2]), float(predicted[3])]
                )
            return predict_points
        # Not warmed up yet: still advance the filter one step.
        kf["filter"].predict()
        print("==================kf['frames']: ", kf["frames"])
        return None

# ans in VELODYNE axis
# ans in VELODYNE axis
def FormCloud2bbox(cloud_new: np.array, cloud_last: np.array):
    """Cluster two consecutive LIDAR sweeps into obstacle boxes.

    The clouds are stacked, filtered, rotated into the camera axis
    convention, ground-segmented with RANSAC, and clustered with
    DBSCAN.

    Returns an (N, 9) array with one row per kept cluster:
    [center_xyz, aabb_min_xyz, aabb_max_xyz].  Returns an empty
    (0, 9) array when no cluster passes the size filter (the original
    raised ValueError in np.stack in that case).
    """
    cloud = np.vstack((cloud_new, cloud_last))
    # Keep points ahead of the sensor (x > 0.1), below 0.5 m, and
    # farther than 3 m in the XY plane.
    distance = np.linalg.norm(cloud[:, :2], axis=1)
    condition = (cloud[:, 0] > 0.1) & (cloud[:, 2] < 0.5) & (distance > 3)
    cloud = cloud[condition]
    # Rotate into the camera axis convention with the same offset used
    # in Operator.on_input.
    cloud = np.dot(cloud, VELODYNE_MATRIX) + np.array([0, -1, -2])
    cloud_3d = open3d.geometry.PointCloud()
    cloud_3d.points = open3d.utility.Vector3dVector(cloud[:, :3])
    # Remove the dominant plane (the ground), then cluster what is left.
    _plane_model, inliers = cloud_3d.segment_plane(
        distance_threshold=0.1, ransac_n=15, num_iterations=100
    )
    unground_cloud = cloud_3d.select_by_index(inliers, invert=True)
    labels = np.array(unground_cloud.cluster_dbscan(eps=1, min_points=10))
    rows = []
    for label in np.unique(labels):
        if label == -1:  # DBSCAN noise
            continue
        cluster = unground_cloud.select_by_index(np.where(labels == label)[0])
        bbox = cluster.get_axis_aligned_bounding_box()
        # Discard clusters wider than 5 m in x or y (walls, buildings).
        if (bbox.max_bound[0] - bbox.min_bound[0]) < 5 and (bbox.max_bound[1] - bbox.min_bound[1]) < 5:
            rows.append(np.concatenate([cluster.get_center(), bbox.min_bound, bbox.max_bound]))
    if not rows:
        return np.empty((0, 9))
    return np.stack(rows, axis=0)


def get_predictions(obstacles, obstacle_with_locations):
    """Turn raw detections into obstacle "predictions".

    Each prediction is an obstacle's 3D location followed by the last
    two fields of its detection row.  Useful when the operator receives
    detections instead of predictions; the obstacles are assumed to be
    static.
    """
    return [
        np.append(location, detection[-2:])
        for detection, location in zip(obstacles, obstacle_with_locations)
    ]


class Operator:
    """
    Compute the location of obstacles, given 2D `bbox`, LIDAR point cloud and a position.
    """

    def __init__(self):
        # Two-frame accumulation of the cloud (camera axes) and its
        # pixel-space projection, rebuilt on every "lidar_pc" message.
        self.point_cloud_full = []
        self.camera_point_cloud_full = []

        # Current-frame 3D points (camera axes) and their projections
        # for the front / left / right virtual camera views.
        self.point_cloud = []
        self.camera_point_cloud = []
        self.camera_point_cloud_left = []
        self.camera_point_cloud_right = []
        # Points classified as ground plus their projection; used to
        # fit the KNN regressor that lifts lane pixels to 3D.
        self.ground_point_cloud = []
        self.camera_ground_point_cloud = []
        # Previous-frame copies of the clouds above.
        self.last_point_cloud = []
        self.last_camera_point_cloud = []
        self.last_camera_point_cloud_left = []
        self.last_camera_point_cloud_right = []
        self.obstacles = []
        self.obstacles_bbox = []
        self.position = []  # latest vehicle pose from the "position" input
        self.lanes = []
        self.kf = Predict_obstacle()  # Kalman-filter tracker bank
        self.temp = 0
        self.ans = []  # cluster boxes from FormCloud2bbox, world frame
        # Raw sweeps kept in the original LIDAR (Velodyne) axes, for
        # FormCloud2bbox.
        self.point_cloud_in_Lidar_axis = []
        self.last_point_cloud_in_Lidar_axis = []

    def on_event(
        self,
        dora_event: dict,
        send_output: Callable[[str, bytes], None],
    ) -> DoraStatus:
        """Dora entry point: forward INPUT events to on_input."""
        if dora_event["type"] == "INPUT":
            return self.on_input(dora_event, send_output)
        return DoraStatus.CONTINUE

    def on_input(
        self,
        dora_input: dict,
        send_output: Callable[[str, bytes], None],
    ):
        """Handle one input message, dispatched on ``dora_input["id"]``.

        - "lidar_pc": cache the sweep (LIDAR axes), convert to camera
          axes, filter, and project to front/left/right pixel space.
        - "position": refresh the camera extrinsic matrix.
        - "lanes": lift 2D lane pixels to 3D via KNN over ground
          points and emit "global_lanes".
        - "obstacles_bbox": locate each 2D box in 3D from the cached
          clouds and emit "obstacles" / "obstacles_prediction_res".
        """
        if "lidar_pc" == dora_input["id"]:
            point_cloud = np.frombuffer(dora_input["data"], np.float32)
            point_cloud = point_cloud.reshape((-1, 3))



            # Keep the raw sweep (LIDAR axes) for FormCloud2bbox.
            self.last_point_cloud_in_Lidar_axis = self.point_cloud_in_Lidar_axis
            self.point_cloud_in_Lidar_axis = point_cloud


            # From Velodyne axis to Camera axis
            # from Velodyne axis:
            # x -> forward, y -> right, z -> top
            # to Camera axis:
            # x -> right, y -> bottom, z -> forward
            point_cloud = np.dot(
                point_cloud,
                VELODYNE_MATRIX,
            ) + np.array([0, -1, -2])

            # Forward points only ( forward = z > 0.1 )
            point_cloud = point_cloud[np.where(point_cloud[:, 2] > -5)]

            # NOTE(review): np.where(...) returns a (non-empty) tuple,
            # which is always truthy, so this `or`/`and` chain
            # short-circuits to the FIRST np.where only (x < -1); the
            # intended compound filter never applies, and the `and`
            # between boolean arrays would raise if it were reached.
            useful_index = np.where(point_cloud[:, 0] <-1) or np.where(point_cloud[:, 0] >1) or ( np.where(point_cloud[:, 0] < 1 ) and np.where(point_cloud[:, 0] >-1 and np.where(point_cloud[:, 2] > 1.5  )))
            point_cloud = point_cloud[useful_index]

            # point_cloud_new = np.array([]) #[b, c, d] [c, d]
            # for line in point_cloud:
            #     if not ((line[0] >-1 or line[0] <1) and line[2] <1):
            #         point_cloud_new.append(line)
            # point_cloud = point_cloud_new

            # Remove ground points. Above lidar only ( bottom = y < 1.0 )
            above_ground_point_index = np.where(point_cloud[:, 1] < 1.8)

            # NOTE(review): above_ground_point_index is a tuple, so
            # `== False` evaluates to the scalar False and this indexes
            # with a boolean scalar instead of selecting the complement
            # set -- self.ground_point_cloud is NOT the y >= 1.8 points.
            self.ground_point_cloud = point_cloud[
                above_ground_point_index == False
            ]

            point_cloud = point_cloud[above_ground_point_index]

            # print("point:  ", point_cloud.shape)
            # Rotations from the front-camera frame into the left/right
            # virtual camera frames, plus a fixed translation offset.
            CENTER2LEFT_MATRIX =  np.array([[0,-1,0], [1 ,0 ,0], [0 ,0 ,1]]) # front camera to left camera rotation matrix
            CENTER2RIGHT_MATRIX =  np.array([[0,-1,0], [-1 ,0 ,0], [0 ,0 ,1]])
            point_cloud_left  = np.dot(point_cloud,CENTER2LEFT_MATRIX)+ np.array([-0.55, -2, -0.42])
            point_cloud_right = np.dot(point_cloud,CENTER2RIGHT_MATRIX)+ np.array([-0.55, -2, -0.42])


            # 3D array -> 2D array with index_x -> pixel x, index_y -> pixel_y, value -> z
            camera_point_cloud = local_points_to_camera_view(
                point_cloud, INTRINSIC_MATRIX
            )

            camera_point_cloud_left = local_points_to_camera_view(
                point_cloud_left, INTRINSIC_MATRIX
            )
            camera_point_cloud_right = local_points_to_camera_view(
                point_cloud_right, INTRINSIC_MATRIX
            )
            # print("========================xxxxxxxxxxxxxxxxxxxxxxxxxx", camera_point_cloud.shape)
            # np.savetxt("camera_point.txt", camera_point_cloud, delimiter=" ")
            self.camera_ground_point_cloud = local_points_to_camera_view(
                self.ground_point_cloud, INTRINSIC_MATRIX
            )


            if len(point_cloud) != 0:
                # Rotate the two-frame buffers: current becomes last.
                self.last_point_cloud = self.point_cloud
                self.last_camera_point_cloud = self.camera_point_cloud
                self.last_camera_point_cloud_left = self.camera_point_cloud_left
                self.last_camera_point_cloud_right = self.camera_point_cloud_right

                self.camera_point_cloud = camera_point_cloud.T
                self.camera_point_cloud_left = camera_point_cloud_left.T
                self.camera_point_cloud_right = camera_point_cloud_right.T
                self.point_cloud = point_cloud
                if len(self.last_point_cloud) > 0:
                    # Stack last + current so bbox lookups see two
                    # sweeps' worth of points.
                    self.point_cloud_full = np.vstack(
                        (self.last_point_cloud, self.point_cloud)
                    )
                    try:
                        self.camera_point_cloud_full = np.vstack(
                            (self.last_camera_point_cloud, self.camera_point_cloud)
                        )
                    except Exception as e:
                        # Shapes can disagree across frames; log and keep going.
                        print(self.last_camera_point_cloud.shape)
                        print(self.camera_point_cloud.shape)
                        print(str(e))

                    self.camera_point_cloud_full_left = np.vstack(
                            (self.last_camera_point_cloud_left, self.camera_point_cloud_left)
                        )
                    self.camera_point_cloud_full_right = np.vstack(
                        (self.last_camera_point_cloud_right, self.camera_point_cloud_right)
                    )

                #     self.point_cloud_all= np.vstack((self.point_cloud,self.last_point_cloud))
                #     self.camera_point_cloud_all= np.vstack((self.camera_point_cloud,self.last_camera_point_cloud))
                # else:
                #     self.point_cloud_all= self.point_cloud
                #     self.camera_point_cloud_all= self.camera_point_cloud


        elif "position" == dora_input["id"]:
            # Add sensor transform
            self.position = np.frombuffer(dora_input["data"], np.float32)
            # consider camera relative position to vehicle
            vehicle_T_camera = np.identity(4)
            vehicle_T_camera[:3, 3] = CAMERA_POSITION[:3]
            world_T_vehicle = get_projection_matrix(self.position)
            world_T_camera = np.dot(world_T_vehicle, vehicle_T_camera)
            # get camera extrinsic matrix
            # NOTE(review): self.extrinsic_matrix is only defined after
            # the first "position" message; the "lanes" branch assumes
            # it exists.
            self.extrinsic_matrix = get_extrinsic_matrix(
                world_T_camera
            )

        elif "lanes" == dora_input["id"]:
            # Each lane arrives as 60 (x, y) pixel coordinates.
            lanes = np.frombuffer(dora_input["data"], np.int32).reshape(
                (-1, 60, 2)
            )

            # Fit pixel -> 3D ground-point regression from the cached
            # ground cloud, then lift each lane pixel to 3D.
            knnr = KNeighborsRegressor(n_neighbors=4)
            knnr.fit(
                self.camera_ground_point_cloud[:, :2], self.ground_point_cloud
            )

            processed_lanes = []
            for lane in lanes:
                lane_location = knnr.predict(lane)
                lane_location = np.array(lane_location)

                # Homogenize, then transform into the world frame.
                lane_location = np.hstack(
                    (
                        lane_location,
                        np.ones((lane_location.shape[0], 1)),
                    )
                )
                lane_location = np.dot(lane_location, self.extrinsic_matrix.T)[
                    :, :3
                ]
                processed_lanes.append(lane_location)
            processed_lanes = np.array(processed_lanes, np.float32).tobytes()

            send_output("global_lanes", processed_lanes, dora_input["metadata"])

        elif "obstacles_bbox" == dora_input["id"]:
            # update whole frames num
            self.kf.frames += 1

            # Need a pose and at least two accumulated sweeps before
            # any 3D localization is possible.
            if len(self.position) == 0 or len(self.point_cloud) == 0 or len(self.point_cloud_full)==0:
                return DoraStatus.CONTINUE
            # try:
            #     print(f"self.point_cloud: {self.point_cloud.shape}, {self.point_cloud[0]}")
            #     # (966487, 3), [  6.11811495 -13.43523693  75.94897461]
            # except:
            #     print(f"show self.point_cloud failed!")
            # bbox = np.array([[min_x, max_x, min_y, max_y, confidence, label], ... n_bbox ... ])
            self.obstacles_bbox = np.frombuffer(
                dora_input["data"], np.int32
            ).reshape((-1, 6))

            z_points = []
            obstacles_with_location = []
            for obstacle_bb in self.obstacles_bbox:
                [min_x, max_x, min_y, max_y, confidence, label] = obstacle_bb

                # if "obstacles_left" == dora_input["id"] :
                #     z_points = self.point_cloud_full[
                #         np.where(
                #             (self.camera_point_cloud_full_left[:, 0] > min_x)
                #             & (self.camera_point_cloud_full_left[:, 0] < max_x)
                #             & (self.camera_point_cloud_full_left[:, 1] > min_y)
                #             & (self.camera_point_cloud_full_left[:, 1] < max_y)
                #         )
                #     ]
                # elif "obstacles_right" == dora_input["id"]:
                #     z_points = self.point_cloud_full[
                #         np.where(
                #             (self.camera_point_cloud_full_right[:, 0] > min_x)
                #             & (self.camera_point_cloud_full_right[:, 0] < max_x)
                #             & (self.camera_point_cloud_full_right[:, 1] > min_y)
                #             & (self.camera_point_cloud_full_right[:, 1] < max_y)
                #         )
                #     ]
                # else :
                # All 3D points whose pixel projection falls inside the
                # 2D bounding box.
                z_points = self.point_cloud_full[
                    np.where(
                        (self.camera_point_cloud_full[:, 0] > min_x)
                        & (self.camera_point_cloud_full[:, 0] < max_x)
                        & (self.camera_point_cloud_full[:, 1] > min_y)
                        & (self.camera_point_cloud_full[:, 1] < max_y)
                    )
                ]

                if len(z_points) > 0:
                    # Take the point at the 25th percentile of depth:
                    # robust against stray points closer than the box.
                    closest_point = z_points[
                        z_points[:, 2].argsort()[int(len(z_points) / 4)]
                    ] # closest depth (25th percentile)
                    if closest_point[2]>1000:
                        print("error-------------------------")
                        print(closest_point)
                    obstacles_with_location.append(closest_point)
            obstacles_prediction_res = []
            # print("==================ans: ", len(self.point_cloud_in_Lidar_axis)," ", len(self.last_point_cloud_in_Lidar_axis))
            if  len(self.point_cloud_in_Lidar_axis) != 0 and len(self.last_point_cloud_in_Lidar_axis) != 0:
                # Cluster the raw sweeps into boxes, then rotate the
                # three xyz triplets (center, min bound, max bound)
                # into the world frame.
                self.ans = FormCloud2bbox(self.point_cloud_in_Lidar_axis,self.last_point_cloud_in_Lidar_axis)
                self.ans[:,0:3] = np.dot( self.ans[:,0:3] , self.extrinsic_matrix.T[0:3,0:3] )
                self.ans[:,3:6] = np.dot( self.ans[:,3:6] , self.extrinsic_matrix.T[0:3,0:3] )
                self.ans[:,6:9] = np.dot( self.ans[:,6:9] , self.extrinsic_matrix.T[0:3,0:3] )
                trans = self.extrinsic_matrix[:3,3]
                # Add the translation to each xyz triplet: the 9
                # columns cycle x, y, z three times, hence i % 3.
                for i in range(self.ans.shape[1]):
                    self.ans[:, i] = self.ans[:, i] + trans[i % trans.shape[0]]

            # trans = self.extrinsic_matrix.T[:3,3]
            # print("trans ", trans,self.extrinsic_matrix)
            # print("self.ans: ", self.ans[1,:])

            obstacle_res = []
            if len(obstacles_with_location) > 0:
                # Homogenize the picked points and transform them into
                # the world frame.
                obstacles_with_location = np.array(obstacles_with_location)
                obstacles_with_location = np.hstack(
                    (
                        obstacles_with_location,
                        np.ones((obstacles_with_location.shape[0], 1)),
                    )
                )
                obstacles_with_location = np.dot(
                    obstacles_with_location, self.extrinsic_matrix.T
                )[:, :3]
                # print(obstacles_with_location.shape, " shape")
                for o in obstacles_with_location:
                    if o[2]>1000:
                        print("1 error-------------------------")
                        print(o)
            #     # predict v1
            #     for obstacle, location in zip(self.obstacles_bbox, obstacles_with_location):
            #         obstacle = np.append(location, obstacle[-2:])
            #         min_dis = 1000
            #         min_obstacle = [obstacle[0], obstacle[1], obstacle[2]]
            #         for pos_idx in range(len(self.ans)): #[]
            #             pos1 = self.ans[pos_idx][:3]
            #             pos2 = self.ans[pos_idx][3:6]
            #             pos3 = self.ans[pos_idx][6:]
            #             min_dis_temp = min((pow((obstacle[0] - pos1[0]), 2) + pow((obstacle[1] - pos1[1]), 2) + pow((obstacle[2] - pos1[2]), 2)),
            #             (pow((obstacle[0] - pos2[0]), 2) + pow((obstacle[1] - pos2[1]), 2) + pow((obstacle[2] - pos2[2]), 2)),
            #             (pow((obstacle[0] - pos3[0]), 2) + pow((obstacle[1] - pos3[1]), 2) + pow((obstacle[2] - pos3[2]), 2)))
            #             if min_dis_temp < min_dis and min_dis_temp < 2:
            #                 # print("change obstacle -------------------------------------------")
            #                 min_dis = min_dis_temp
            #                 min_obstacle = pos1

            #         obstacle[:3] = min_obstacle[:3]
            #         # print(obstacle[-1], " ================================min: ", min_obstacle, [obstacle[0], obstacle[1], obstacle[2]])
            #         obstacle_res.append(obstacle)
            #         label  = obstacle[-1]
            #         if label < 6: # filter other obstacle
            #             res = self.kf.predict(obstacle[-1], min_obstacle[0], min_obstacle[1], min_obstacle[2])
            #             if res is not None:
            #                 pos = [i[:3] for i in res]
            #                 v = [i[3:] for i in res]
            #                 prediction_res = {
            #                     "actor_id" : label,
            #                     "actor_pos" : pos,
            #                     "actor_velocity" : v
            #                 }
            #                 obstacles_prediction_res.append(prediction_res)
                            # print("label : ", label)

                predictions = get_predictions(
                    self.obstacles_bbox, obstacles_with_location
                )
                for p in predictions:
                    if p[2]>1000:
                        print("2 error-------------------------")
                        print(p)
                # predictions_bytes = np.array(obstacle_res, np.float32).tobytes()
                predictions_bytes = np.array(obstacles_with_location, np.float32).tobytes()
                # try:
                #     print(f"predictions_bytes: {type(predictions_bytes)}, {np.array(predictions, np.float32).shape}, {np.array(predictions, np.float32)[0]}")
                #     # <class 'bytes'>, (3, 5), [ 75.762566  115.856514    2.2158468  39.          2.       ]
                # except:
                #     print(f"show predictions failed!")
                send_output(
                    "obstacles", np.array(predictions, np.float32).tobytes(), dora_input["metadata"]
                )
            else:
                send_output(
                    "obstacles", np.array([]).tobytes(), dora_input["metadata"]
                )

            # NOTE(review): with the "predict v1" block commented out,
            # obstacles_prediction_res is always empty, so only the
            # else branch below ever runs.
            if len(obstacles_prediction_res) > 0:
                obstacles_prediction_res_bytes = pickle.dumps(obstacles_prediction_res)
                send_output(
                    "obstacles_prediction_res", obstacles_prediction_res_bytes, dora_input["metadata"]
                )
            else:
                send_output(
                    "obstacles_prediction_res", pickle.dumps(np.array([])), dora_input["metadata"]
                )

            # if "obstacles_left" == dora_input["id"]:
            #     print("left: ", obstacles_prediction_res_bytes)

            # self.kf.update()

        # elif "obstacles_bbox_lidar" == dora_input["id"]:

            # if len(self.position) == 0 or len(self.point_cloud_in_Lidar_axis) == 0:
            #     return DoraStatus.CONTINUE
            #     ans = FormCloud2bbox(self.point_cloud_in_Lidar_axis,self.last_point_cloud_in_Lidar_axis)
            #     send_output("obstacles_bbox_lidar", ans.tobytes(), dora_input["metadata"])
        return DoraStatus.CONTINUE
