#!/usr/bin/env python
# -*- coding: utf-8 -*-

import rospy
import pandas as pd
import ast, os, time
import numpy as np
from ultralytics import YOLO
import torch
from torch.utils.data import DataLoader, TensorDataset
import cv2
from multiprocessing import shared_memory

from cv_joint_angle.msg import Forearm, Bucket
from mavros_msgs.msg import ExcavatorInclination

class bucketPredict:
    def __init__(self):
        """Initialize the node: rolling data buffers, pub/sub, the YOLO pose
        model, the TorchScript RNN regressor, and the shared-memory handles
        that carry camera frames produced by another process."""
        rospy.init_node('bucket_predict', anonymous=True)
        # Rolling buffers of stamped samples; matched/interpolated later.
        self.forearm_df = pd.DataFrame(columns=['header_stamp', 'forearm'])
        self.inclination_df = pd.DataFrame(columns=['header_stamp', 'boom_deg', 'forearm_deg', 'bucket_deg', 'boom_vel', 'forearm_vel'])
        self.prediction_df = pd.DataFrame(columns=['header_stamp'])
        self.time_stamp_orig = rospy.Time.now()

        rospy.Subscriber('/cv_joint_angle/forearm_apriltag', Forearm, self.forearm_callback)
        rospy.Subscriber('/mavros/excavator_inclination', ExcavatorInclination, self.inclination_callback)
        pub_topic = rospy.get_param('~pub_topic', '/cv_joint_angle/bucket_predict')
        self.angle_pub = rospy.Publisher(pub_topic, Bucket, queue_size=1)
        self.seq = 0

        # NOTE: '$(find ...)' is substituted by roslaunch only; this default is
        # not a usable path when the node is started directly.
        yolo_weight = rospy.get_param('~yolo_weight',
                                      "$(find cv_joint_angle)/weights/3i6p_old_800_allp.onnx")
        # Params may arrive as real booleans (launch <param type="bool">) or as
        # strings; normalize both. BUGFIX: the old string defaults ('False')
        # were truthy, so half/show were effectively always on, and
        # `get_param('~cpu','False') == True` could never match a string.
        self.half = str(rospy.get_param('~half', False)).lower() == 'true'
        self.show = str(rospy.get_param('~show', False)).lower() == 'true'
        self.conf = float(rospy.get_param('~conf', 0.8))  # YOLO expects a float threshold
        self.model = YOLO(yolo_weight, task='pose')
        yolo_classes = rospy.get_param('~yolo_classes', 'None')
        self.classes = [0, 1, 2] if yolo_classes == 'None' else ast.literal_eval(yolo_classes)
        self.target_box = rospy.get_param('~target_box', 'False')  # kept as-is; not used in this file
        self.device = 'cpu' if str(rospy.get_param('~cpu', False)).lower() == 'true' else '0'
        self.point_search_range = int(rospy.get_param('~point_search_range', '4'))

        # 'predict' publishes live estimates; any other value collects training CSVs.
        self.predict = rospy.get_param('~node_work_method', 'predict') == 'predict'
        RF_RNN_weight = rospy.get_param('~RF_RNN_weight',
                                        "$(find cv_joint_angle)/weights/runs/RNN/train/best.pt")
        self.RF_RNN_model = torch.jit.load(RF_RNN_weight)
        self.RF_RNN_model.eval()

        # Block until the camera process has created both shared-memory segments.
        while True:
            try:
                time.sleep(0.5)
                self.shm_color = shared_memory.SharedMemory(name='color_image')
                self.shm_depth = shared_memory.SharedMemory(name='depth_image')
                break
            except FileNotFoundError:
                # SharedMemory raises FileNotFoundError when the segment does
                # not exist yet; keep polling.
                rospy.logwarn_throttle_identical(10, "Shared image data not found.")

    def forearm_callback(self, msg):
        """Append one forearm-angle sample (stamp in ns) to the rolling buffer."""
        row = pd.DataFrame([{
            'header_stamp': msg.header.stamp.to_nsec(),
            'forearm': msg.forearm,
        }])
        self.forearm_df = pd.concat([self.forearm_df, row], ignore_index=True)
        rospy.loginfo_throttle_identical(360, "Forearm data received and added to dataframe.")

    def inclination_callback(self, msg):
        """Append one inclination sample (joint angles and velocities) to the buffer."""
        row = {'header_stamp': msg.header.stamp.to_nsec()}
        for field in ('boom_deg', 'forearm_deg', 'bucket_deg', 'boom_vel', 'forearm_vel'):
            row[field] = getattr(msg, field)
        self.inclination_df = pd.concat([self.inclination_df, pd.DataFrame([row])], ignore_index=True)
        rospy.loginfo_throttle_identical(360, "Inclination data received and added to dataframe.")

    def _get_latest_image_files(self):
        # Return (color, depth, stamp_ns) views over the shared-memory frames,
        # or None until both subscriber buffers have received at least one sample.
        # The returned arrays are zero-copy views into the shared segments — the
        # producer may overwrite them at any time.
        if self.forearm_df.empty or self.inclination_df.empty:return None
        self.time_stamp_orig = rospy.Time.now()
        time_stamp = self.time_stamp_orig.to_nsec()
        # NOTE(review): (848, 480) reads as (width, height), but numpy images are
        # conventionally (height, width). This must match the writer's layout
        # exactly — confirm against the process that fills the shared memory.
        color_image = np.ndarray((848, 480, 3), dtype=np.uint8, buffer=self.shm_color.buf)
        depth_image = np.ndarray((848, 480), dtype=np.uint16, buffer=self.shm_depth.buf)
        return color_image, depth_image, time_stamp

    def _yolo_predict(self):
        """Run YOLO pose inference on the current color frame.

        Returns None when nothing was detected; otherwise a list of rows:
          - one [x, y, z] row per keypoint slot (pixel coords plus sampled
            depth), averaged over detections while ignoring zero samples;
          - then one [x, y, z, w, h] row per class in self.classes
            (all zeros when that class was not detected).
        """
        results = self.model(self.color_image, conf=self.conf, device=self.device, half=self.half, classes=self.classes)
        if self.show:
            try:
                cv2.imshow("yolov8 inf", results[0].plot())
                cv2.waitKey(1)
            except Exception:
                pass  # headless environments have no display; inference still proceeds

        # BUGFIX: `any(results[0].boxes.cls)` was False when the only detections
        # had class id 0 (which is in the default class list), silently
        # discarding valid results. Test the detection count instead.
        num = len(results[0].boxes.cls)
        if num == 0:
            return None

        # Arrange keypoints as (n_keypoints, n_detections, 2).
        if num > 1:
            keypoints = torch.stack([*results[0].keypoints.xy], dim=1).view(-1, num, 2)
        else:
            keypoints = (results[0].keypoints.xy).unsqueeze(0).view(-1, num, 2)

        boxes = results[0].boxes
        box_info = []
        for c in self.classes:
            if c in boxes.cls:
                # Use the last detection of this class (matches prior behavior).
                for i in range(len(boxes.cls)):
                    if boxes.cls[i] == c:
                        index = i
                x, y, w, h = boxes.xywh.numpy()[index]
                xy_depth = self._get_distance(np.array([[[x, y]]]))[0][0][2]
                box_info.append([x, y, xy_depth, w, h])
            else:
                box_info.append([0, 0, 0, 0, 0])

        # For each keypoint slot, average x/y/depth across detections,
        # skipping zero (invalid) samples column-wise.
        keypoints_with_distance = self._get_distance(keypoints.numpy())
        average_values = []
        for sub_array in keypoints_with_distance:
            sub_array_np = np.array(sub_array)
            mean_values = []
            for i in range(sub_array_np.shape[1]):
                column_data = sub_array_np[:, i]
                if np.all(column_data == 0):
                    mean_values.append(0)
                else:
                    mean_values.append(np.round(np.mean(column_data[column_data != 0]), 3))
            average_values.append(mean_values)
        average_values.extend(box_info)
        return average_values

    def _get_distance(self, kp):
        add_distance_array = np.zeros((kp.shape[0], kp.shape[1], 3), dtype=np.float64)
        for row in range(kp.shape[0]):
            for col in range(kp.shape[1]):
                mid_pos = kp[row, col]
                distance = self._get_point_distance(mid_pos.astype(int))
                add_distance_array[row, col] = np.concatenate((mid_pos, distance))
        return add_distance_array

    def _get_point_distance(self, pixel_coordinates):
        distance_list = np.array([])
        for scope in range(self.point_search_range):
            if (pixel_coordinates[0] + scope < self.depth_image.shape[1] and 
                pixel_coordinates[1] + scope < self.depth_image.shape[0]):
                distance_list = self.depth_image[
                    pixel_coordinates[1]-scope:pixel_coordinates[1]+scope+1,
                    pixel_coordinates[0]-scope:pixel_coordinates[0]+scope+1]
                if np.sum(distance_list) != 0:
                    break
        distance_list = distance_list[distance_list != 0]
        if len(distance_list) >= 4:
            distance_list = np.quantile(distance_list, [0.25, 0.5, 0.75])
        elif len(distance_list) == 0:
            return np.array([0.])
        return np.array([np.round(np.mean(distance_list) * 1000) / 10000])

    def _match_and_interpolate_df(self, df_main, df_other, col_names):
        for col_name in col_names:
            if col_name not in df_main.columns:
                df_main[col_name] = None

        rows_to_delete = []
        for index, header_stamp in enumerate(df_main['header_stamp']):
            if all(pd.notnull(df_main.at[index, col_name]) for col_name in col_names):
                continue

            if header_stamp < df_other['header_stamp'].min():
                rows_to_delete.append(index)
            elif header_stamp < df_other['header_stamp'].max() and header_stamp > df_other['header_stamp'].min():
                for col_name in col_names:
                    if pd.notnull(df_main.at[index, col_name]):
                        continue
                    lower = df_other[df_other['header_stamp'] <= header_stamp].iloc[-1]
                    upper = df_other[df_other['header_stamp'] >= header_stamp].iloc[0]
                    if upper['header_stamp'] == lower['header_stamp']:
                        closest_value = round(lower[col_name], 3)
                    else:
                        closest_value = round(lower[col_name] + (
                            (header_stamp - lower['header_stamp']) / (upper['header_stamp'] - lower['header_stamp'])
                        ) * (upper[col_name] - lower[col_name]), 3)
                    df_main.at[index, col_name] = closest_value

        df_main.drop(index=rows_to_delete, inplace=True)
        df_main.reset_index(drop=True, inplace=True)

    def _match_and_interpolate(self, df_main, df_other, col_names):
        time_stamp = self.time_stamp_orig.to_nsec()

        if time_stamp < df_other['header_stamp'].min():
            df_main.drop(df_main[df_main['header_stamp'] == time_stamp].index, inplace=True)
            df_main.reset_index(drop=True, inplace=True)
            return

        if time_stamp > df_other['header_stamp'].max():
            return

        for col_name in col_names:
            if col_name not in df_main.columns:
                df_main[col_name] = None

            if not pd.isna(df_main.loc[df_main['header_stamp'] == time_stamp, col_name].values[0]):
                continue

            lower = df_other[df_other['header_stamp'] <= time_stamp].iloc[-1]
            upper = df_other[df_other['header_stamp'] >= time_stamp].iloc[0]

            if upper['header_stamp'] == lower['header_stamp']:
                closest_value = round(lower[col_name], 3)
            else:
                closest_value = round(lower[col_name] + (
                    (time_stamp - lower['header_stamp']) / (upper['header_stamp'] - lower['header_stamp'])
                ) * (upper[col_name] - lower[col_name]), 3)

            df_main.loc[df_main['header_stamp'] == time_stamp, col_name] = closest_value

    def _collect_data(self, file_path):
        if self.forearm_df is not None:
            self._match_and_interpolate_df(self.prediction_df, self.forearm_df, ['forearm'])
        if self.inclination_df is not None:
            self._match_and_interpolate_df(
                        self.prediction_df, self.inclination_df, ['boom_deg', 'forearm_deg', 'bucket_deg', 'boom_vel', 'forearm_vel'])

        if len(self.prediction_df) >= 100:
            rows_to_save = self.prediction_df.dropna()
            if not rows_to_save.empty:
                rows_to_save.to_csv(file_path, mode='a', header=not os.path.exists(file_path), index=False)
            self.prediction_df = self.prediction_df[self.prediction_df.isna().any(axis=1)].reset_index(drop=True)
            
            self.forearm_df = self.forearm_df[-150:]
            self.inclination_df = self.inclination_df[-500:]

    def _load_data(self, dataset):
        X = dataset.iloc[[-1]].drop(['header_stamp', 'bucket_deg'], axis=1).values
        y = dataset['bucket_deg'].iloc[-1]
        X = np.array(X, dtype=np.float32)
        y = float(y)
        X_test_tensor = torch.tensor(X, dtype=torch.float32).unsqueeze(0)
        y_test_tensor = torch.tensor(y, dtype=torch.float32).view(-1, 1)
        test_dataset = TensorDataset(X_test_tensor, y_test_tensor)
        return DataLoader(test_dataset, batch_size=1, shuffle=False)

    def _bucket_predict(self):
        """Prediction mode: interpolate sensor columns into prediction_df, run
        the RNN on the newest complete row, and publish the bucket angle."""
        self._match_and_interpolate(self.prediction_df, self.forearm_df, ['forearm'])
        self._match_and_interpolate(self.prediction_df, self.inclination_df, ['boom_deg', 'forearm_deg', 'bucket_deg', 'boom_vel', 'forearm_vel'])

        # Bound the buffers so memory stays flat during long runs.
        if len(self.prediction_df) > 150:
            self.prediction_df = self.prediction_df[-150:]
            self.forearm_df = self.forearm_df[-150:]
            self.inclination_df = self.inclination_df[-500:]

        fullfill_df = self.prediction_df.dropna()
        # BUGFIX: an empty frame used to reach _load_data and raise IndexError
        # on dataset.iloc[[-1]]; bail out early instead of crashing the loop.
        if fullfill_df.empty:
            rospy.logwarn_throttle_identical(10, "No data available in test_loader to make predictions.")
            return

        test_loader = self._load_data(fullfill_df)
        with torch.no_grad():
            angle = None
            for features, _ in test_loader:  # `input` shadowed a builtin before
                angle = self.RF_RNN_model(features)
                break

        if angle is None:
            rospy.logwarn("No data available in test_loader to make predictions.")
            return

        self.bucket_angle = Bucket()
        self.bucket_angle.header.stamp = self.time_stamp_orig
        self.seq += 1
        self.bucket_angle.header.seq = self.seq
        # assumes the model emits one scalar per batch element — TODO confirm shape
        self.bucket_angle.bucket = angle[0].item()
        self.angle_pub.publish(self.bucket_angle)

    def run(self):
        """Main loop: grab shared-memory frames, run YOLO pose inference,
        accumulate stamped rows, and either publish predictions or collect
        training data; flushes any leftover complete rows to CSV on exit."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        data_folder = os.path.join(current_dir, '../data')
        os.makedirs(data_folder, exist_ok=True)
        
        # forearm_data_file_path = os.path.join(data_folder, "forearm_data.csv")
        # inclination_data_file_path = os.path.join(data_folder, "inclination_data.csv")
        prediction_data_file_path = os.path.join(data_folder, "prediction_data.csv")

        try:
            rospy.loginfo_once("Bucket predicte node is runing.")
            while not rospy.is_shutdown():
                image_files = self._get_latest_image_files()
                if image_files is not None:
                    self.color_image, self.depth_image, latest_timestamp = image_files
                    rospy.loginfo_throttle_identical(360, "Latest images loaded successfully.")

                    prediction_results = self._yolo_predict()
                    if prediction_results is not None:
                        data = {'header_stamp': latest_timestamp}
                        # Only proceed when box rows are present after the 6
                        # keypoint rows (rows 0-5 are [x, y, z]; the rest are
                        # 5-element [x, y, z, w, h] box rows).
                        if len(prediction_results) > 6:
                            for idx, (x, y, z) in enumerate(prediction_results):
                                data[f'kp{idx+1}_x'] = x
                                data[f'kp{idx+1}_y'] = y
                                data[f'kp{idx+1}_z'] = z
                                # Stop before the box rows, which would not
                                # unpack into three values.
                                if idx == 5 :break
                            for idx, (x, y, z, w, h) in enumerate(prediction_results[6:]):
                                data[f'box{idx+1}_x'] = x
                                data[f'box{idx+1}_y'] = y
                                data[f'box{idx+1}_z'] = z
                                data[f'box{idx+1}_w'] = w
                                data[f'box{idx+1}_h'] = h
                            self.prediction_df = pd.concat([self.prediction_df, pd.DataFrame([data])], ignore_index=True)
                            rospy.loginfo_once("Prediction data added to dataframe.")

                            # Mode selected by ~node_work_method in __init__.
                            if self.predict:
                                self._bucket_predict()
                            else:
                                self._collect_data(prediction_data_file_path)
                    else:
                        rospy.logwarn_throttle_identical(10, "Yolo output is none")
                else:
                    rospy.logwarn_throttle_identical(10, "Can't get image")

        except KeyboardInterrupt:
            rospy.loginfo("Shutting down gracefully...")

        finally:
            # In collection mode, flush whatever complete rows remain so no
            # labeled data is lost on shutdown.
            if not (self.prediction_df.empty or self.predict):
                if self.forearm_df is not None:
                    self._match_and_interpolate_df(self.prediction_df, self.forearm_df, ['forearm'])
                if self.inclination_df is not None:
                    self._match_and_interpolate_df(
                            self.prediction_df, self.inclination_df, ['boom_deg', 'forearm_deg', 'bucket_deg', 'boom_vel', 'forearm_vel'])
                
                rows_to_save = self.prediction_df.dropna()
                if not rows_to_save.empty:
                    rows_to_save.to_csv(prediction_data_file_path, mode='a', 
                                                                header=not os.path.exists(prediction_data_file_path), index=False)
                rospy.loginfo("Data saved to CSV files.")
            
            # Detach from (but do not unlink) the producer's shared memory.
            self.shm_color.close()
            self.shm_depth.close()


if __name__ == '__main__':
    # Construct the node (this blocks until shared memory exists), then spin.
    node = bucketPredict()
    node.run()
