/*********************************************************************
*
* Software License Agreement (BSD License)
*
*  Copyright (c) 2020 Shivang Patel
*  All rights reserved.
*
*  Redistribution and use in source and binary forms, with or without
*  modification, are permitted provided that the following conditions
*  are met:
*
*   * Redistributions of source code must retain the above copyright
*     notice, this list of conditions and the following disclaimer.
*   * Redistributions in binary form must reproduce the above
*     copyright notice, this list of conditions and the following
*     disclaimer in the documentation and/or other materials provided
*     with the distribution.
*   * Neither the name of Willow Garage, Inc. nor the names of its
*     contributors may be used to endorse or promote products derived
*     from this software without specific prior written permission.
*
*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
*  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
*  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
*  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
*  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
*  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
*  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
*  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
*  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
*  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
*  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
*  POSSIBILITY OF SUCH DAMAGE.
*
* Author: Shivang Patel
*
* Reference tutorial:
* https://navigation.ros.org/tutorials/docs/writing_new_nav2planner_plugin.html
*********************************************************************/

#include <cmath>
#include <string>
#include <memory>
#include "nav2_util/node_utils.hpp"
#include "ipa_coverage_planning_plugin/ipa_coverage_planning_plugin.hpp"

namespace ipa_coverage_planning_plugin
{

// Default-construct the planner with no TF buffer and no costmap attached;
// both are supplied later by configure() before the plugin is used.
IpaFullCoveragePath::IpaFullCoveragePath()
        : tf_(nullptr),
          costmap_(nullptr)
{
    // Intentionally empty: all real initialization happens in configure().
}

IpaFullCoveragePath::~IpaFullCoveragePath()
{
    // Guard against destruction before configure() was ever called (e.g. a
    // plugin-load failure path): node_handle_ would still be a null
    // shared_ptr and calling get_logger() through it would be a null deref.
    if (node_handle_)
    {
        RCLCPP_INFO(
                node_handle_->get_logger(), "Destroying plugin %s of type IpaFullCoveragePath",
                name_.c_str());
    }
}

/// @brief Lifecycle configure hook: binds the planner to its parent node,
///        stores the costmap/TF handles, creates the map-server client and
///        reads every ROS parameter used by the selected exploration
///        algorithm.
/// @param parent      weak handle to the owning lifecycle node
/// @param name        plugin name; used as the parameter namespace prefix
/// @param tf          shared TF2 buffer (stored, not used here)
/// @param costmap_ros costmap wrapper; provides the costmap and global frame
void IpaFullCoveragePath::configure(
        const rclcpp_lifecycle::LifecycleNode::WeakPtr &parent,
        std::string name, std::shared_ptr<tf2_ros::Buffer> tf,
        std::shared_ptr<nav2_costmap_2d::Costmap2DROS> costmap_ros)
{
    // NOTE(review): parent.lock() may return null if the node is already
    // being destroyed — Nav2 calls configure() while the node is alive, but
    // confirm this invariant if the plugin is ever driven manually.
    node_handle_ = parent.lock();
    name_ = name;
    tf_ = tf;
    costmap_ = costmap_ros->getCostmap();
    global_frame_ = costmap_ros->getGlobalFrameID();
    cpp_grid_client_ = node_handle_->create_client<nav2_msgs::srv::GetMapNav2>("/map_server/map");

    // ---------------- Parameter initialization ----------------
    // Occupancy thresholds used when interpreting the received map.
    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".free_thresh",
                                                 rclcpp::ParameterValue(0.25));
    node_handle_->get_parameter(name_ + ".free_thresh", free_thresh);
    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".occupied_thresh",
                                                 rclcpp::ParameterValue(0.65));
    node_handle_->get_parameter(name_ + ".occupied_thresh", occupied_thresh);

    // Robot geometry: physical radius and the radius actually covered by the
    // cleaning device / sensor footprint.
    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".robot_radius",
                                                 rclcpp::ParameterValue(0.15));
    node_handle_->get_parameter(name_ + ".robot_radius", robotRadius);
    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".coverage_radius",
                                                 rclcpp::ParameterValue(0.15));
    node_handle_->get_parameter(name_ + ".coverage_radius", coverage_radius);

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".delta_theta_two_yaw",
                                                 rclcpp::ParameterValue(
                                                         0.08726646)); // 5deg= 0.017453292×5=0.08726646rad
    node_handle_->get_parameter(name_ + ".delta_theta_two_yaw", delta_theta_two_yaw);

    // General exploration parameters (algorithm selection and options shared
    // by every explorator).
    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".room_exploration_algorithm",
                                                 rclcpp::ParameterValue(2));
    node_handle_->get_parameter(name_ + ".room_exploration_algorithm", room_exploration_algorithm_);
    std::cout << "room_exploration/room_exploration_algorithm = " << room_exploration_algorithm_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".display_trajectory",
                                                 rclcpp::ParameterValue(true));
    node_handle_->get_parameter(name_ + ".display_trajectory", display_trajectory_);
    std::cout << "room_exploration/display_trajectory = " << display_trajectory_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".map_correction_closing_neighborhood_size",
                                                 rclcpp::ParameterValue(2));
    node_handle_->get_parameter(name_ + ".map_correction_closing_neighborhood_size",
                                map_correction_closing_neighborhood_size_);
    std::cout << "room_exploration/map_correction_closing_neighborhood_size = "
              << map_correction_closing_neighborhood_size_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".return_path",
                                                 rclcpp::ParameterValue(true));
    node_handle_->get_parameter(name_ + ".return_path", return_path_);
    std::cout << "room_exploration/return_path = " << return_path_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".execute_path",
                                                 rclcpp::ParameterValue(false));
    node_handle_->get_parameter(name_ + ".execute_path", execute_path_);
    std::cout << "room_exploration/execute_path = " << execute_path_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".goal_eps", rclcpp::ParameterValue(0.35));
    node_handle_->get_parameter(name_ + ".goal_eps", goal_eps_);
    std::cout << "room_exploration/goal_eps = " << goal_eps_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".use_dyn_goal_eps",
                                                 rclcpp::ParameterValue(false));
    node_handle_->get_parameter(name_ + ".use_dyn_goal_eps", use_dyn_goal_eps_);
    std::cout << "room_exploration/use_dyn_goal_eps = " << use_dyn_goal_eps_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".interrupt_navigation_publishing",
                                                 rclcpp::ParameterValue(false));
    node_handle_->get_parameter(name_ + ".interrupt_navigation_publishing", interrupt_navigation_publishing_);
    std::cout << "room_exploration/interrupt_navigation_publishing = " << interrupt_navigation_publishing_
              << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".revisit_areas",
                                                 rclcpp::ParameterValue(false));
    node_handle_->get_parameter(name_ + ".revisit_areas", revisit_areas_);
    std::cout << "room_exploration/revisit_areas = " << revisit_areas_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".left_sections_min_area",
                                                 rclcpp::ParameterValue(0.01));
    node_handle_->get_parameter(name_ + ".left_sections_min_area", left_sections_min_area_);
    std::cout << "room_exploration/left_sections_min_area_ = " << left_sections_min_area_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".global_costmap_topic",
                                                 rclcpp::ParameterValue("/global_costmap/costmap"));
    node_handle_->get_parameter(name_ + ".global_costmap_topic", global_costmap_topic_);
    std::cout << "room_exploration/global_costmap_topic = " << global_costmap_topic_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".coverage_check_service_name",
                                                 rclcpp::ParameterValue(
                                                         "/room_exploration/coverage_check_server/coverage_check"));
    node_handle_->get_parameter(name_ + ".coverage_check_service_name", coverage_check_service_name_);
    std::cout << "room_exploration/coverage_check_service_name = " << coverage_check_service_name_ << std::endl;

    // NOTE(review): the parameter name ".map_frame_" carries a trailing
    // underscore (unlike every other parameter here); kept as-is because
    // renaming it would break existing parameter files.
    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".map_frame_",
                                                 rclcpp::ParameterValue("map"));
    node_handle_->get_parameter(name_ + ".map_frame_", map_frame_);
    std::cout << "room_exploration/map_frame = " << map_frame_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".camera_frame",
                                                 rclcpp::ParameterValue("base_link"));
    node_handle_->get_parameter(name_ + ".camera_frame", camera_frame_);
    std::cout << "room_exploration/camera_frame = " << camera_frame_ << std::endl;

    nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".planning_mode", rclcpp::ParameterValue(1));
    node_handle_->get_parameter(name_ + ".planning_mode", planning_mode_);
    std::cout << "room_exploration/planning_mode_ = " << planning_mode_ << std::endl;

    // Announce the selected exploration method.
    if (room_exploration_algorithm_ == 1)
        std::cout << "You have chosen the grid exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 2)
        std::cout << "You have chosen the boustrophedon exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 3)
        std::cout << "You have chosen the neural network exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 4)
        std::cout << "You have chosen the convexSPP exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 5)
        std::cout << "You have chosen the flow network exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 6)
        std::cout << "You have chosen the energy functional exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 7)
        std::cout << "You have chosen the voronoi exploration method." << std::endl;
    else if (room_exploration_algorithm_ == 8)
        std::cout << "You have chosen the boustrophedon variant exploration method." << std::endl;

    // Algorithm-specific parameters: only the block for the selected
    // algorithm is declared/read.
    if (room_exploration_algorithm_ == 1) // get grid point exploration parameters
    {

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".tsp_solver",
                                                     rclcpp::ParameterValue((int) TSP_NEAREST_NEIGHBOR));
        node_handle_->get_parameter(name_ + ".tsp_solver", tsp_solver_);
        std::cout << "room_exploration/tsp_solver = " << tsp_solver_ << std::endl;
        int timeout = 0;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".tsp_solver_timeout",
                                                     rclcpp::ParameterValue(600));
        node_handle_->get_parameter(name_ + ".tsp_solver_timeout", timeout);
        tsp_solver_timeout_ = timeout;
        std::cout << "room_exploration/tsp_solver_timeout = " << tsp_solver_timeout_ << std::endl;
    } else if ((room_exploration_algorithm_ == 2) ||
               (room_exploration_algorithm_ == 8)) // set boustrophedon (variant) exploration parameters
    {
        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".min_cell_area",
                                                     rclcpp::ParameterValue(200.0));
        node_handle_->get_parameter(name_ + ".min_cell_area", min_cell_area_);
        std::cout << "room_exploration/min_cell_area_ = " << min_cell_area_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".path_eps",
                                                     rclcpp::ParameterValue(6.0));
        node_handle_->get_parameter(name_ + ".path_eps", path_eps_);
        std::cout << "room_exploration/path_eps_ = " << path_eps_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".grid_obstacle_offset",
                                                     rclcpp::ParameterValue(0.251));
        node_handle_->get_parameter(name_ + ".grid_obstacle_offset", grid_obstacle_offset_);
        std::cout << "room_exploration/grid_obstacle_offset_ = " << grid_obstacle_offset_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".max_deviation_from_track",
                                                     rclcpp::ParameterValue(0));
        node_handle_->get_parameter(name_ + ".max_deviation_from_track", max_deviation_from_track_);
        std::cout << "room_exploration/max_deviation_from_track_ = " << max_deviation_from_track_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".cell_visiting_order",
                                                     rclcpp::ParameterValue(2));
        node_handle_->get_parameter(name_ + ".cell_visiting_order", cell_visiting_order_);
        std::cout << "room_exploration/cell_visiting_order = " << cell_visiting_order_ << std::endl;
    } else if (room_exploration_algorithm_ == 3) // set neural network explorator parameters
    {
        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".step_size",
                                                     rclcpp::ParameterValue(0.008));
        node_handle_->get_parameter(name_ + ".step_size", step_size_);
        std::cout << "room_exploration/step_size_ = " << step_size_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".A", rclcpp::ParameterValue(17));
        node_handle_->get_parameter(name_ + ".A", A_);
        std::cout << "room_exploration/A_ = " << A_ << std::endl;

        // BUGFIX: this previously re-declared ".A" (copy-paste error), so the
        // ".B" parameter was never declared and the subsequent
        // get_parameter(".B") call could not succeed.
        // TODO(review): confirm 17 is the intended default for B as well.
        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".B", rclcpp::ParameterValue(17));
        node_handle_->get_parameter(name_ + ".B", B_);
        std::cout << "room_exploration/B_ = " << B_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".D", rclcpp::ParameterValue(7));
        node_handle_->get_parameter(name_ + ".D", D_);
        std::cout << "room_exploration/D_ = " << D_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".E", rclcpp::ParameterValue(80));
        node_handle_->get_parameter(name_ + ".E", E_);
        std::cout << "room_exploration/E_ = " << E_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".mu", rclcpp::ParameterValue(1.03));
        node_handle_->get_parameter(name_ + ".mu", mu_);
        std::cout << "room_exploration/mu_ = " << mu_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".delta_theta_weight",
                                                     rclcpp::ParameterValue(0.15));
        node_handle_->get_parameter(name_ + ".delta_theta_weight", delta_theta_weight_);
        std::cout << "room_exploration/delta_theta_weight_ = " << delta_theta_weight_ << std::endl;
    } else if (room_exploration_algorithm_ == 4) // set convexSPP explorator parameters
    {
        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".cell_size", rclcpp::ParameterValue(0));
        node_handle_->get_parameter(name_ + ".cell_size", cell_size_);
        std::cout << "room_exploration/cell_size_ = " << cell_size_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".delta_theta",
                                                     rclcpp::ParameterValue(0.78539816339));
        node_handle_->get_parameter(name_ + ".delta_theta", delta_theta_);
        std::cout << "room_exploration/delta_theta = " << delta_theta_ << std::endl;
    } else if (room_exploration_algorithm_ == 5) // set flowNetwork explorator parameters
    {
        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".curvature_factor",
                                                     rclcpp::ParameterValue(1.1));
        node_handle_->get_parameter(name_ + ".curvature_factor", curvature_factor_);
        std::cout << "room_exploration/curvature_factor = " << curvature_factor_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".max_distance_factor",
                                                     rclcpp::ParameterValue(1.0));
        node_handle_->get_parameter(name_ + ".max_distance_factor", max_distance_factor_);
        std::cout << "room_exploration/max_distance_factor_ = " << max_distance_factor_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".cell_size", rclcpp::ParameterValue(0));
        node_handle_->get_parameter(name_ + ".cell_size", cell_size_);
        std::cout << "room_exploration/cell_size_ = " << cell_size_ << std::endl;

        nav2_util::declare_parameter_if_not_declared(node_handle_, name_ + ".path_eps",
                                                     rclcpp::ParameterValue(3.0));
        node_handle_->get_parameter(name_ + ".path_eps", path_eps_);
        std::cout << "room_exploration/path_eps_ = " << path_eps_ << std::endl;
    } else if (room_exploration_algorithm_ == 6) // set energyfunctional explorator parameters
    {
        // no additional parameters
    } else if (room_exploration_algorithm_ == 7) // set voronoi explorator parameters
    {
        // no additional parameters
    }

    if (revisit_areas_)
        std::cout << "Areas not seen after the initial execution of the path will be revisited." << std::endl;
    else
        std::cout << "Areas not seen after the initial execution of the path will NOT be revisited." << std::endl;

    // Field-of-view polygon (4 corners, robot frame, in meters) used by the
    // viewpoint planning mode; footprint mode uses a square working area.
    fov_points.resize(4);
    if (planning_mode_ == 2)
    {
        fov_points[0].x = 0.04035;        // this field of view represents the off-center iMop floor wiping device
        fov_points[0].y = -0.136;
        fov_points[1].x = 0.04035;
        fov_points[1].y = 0.364;
        fov_points[2].x = 0.54035;        // todo: this definition is mirrored on x (y-coordinates are inverted) to work properly --> check why, make it work the intuitive way
        fov_points[2].y = 0.364;
        fov_points[3].x = 0.54035;
        fov_points[3].y = -0.136;
    }
//	fov_points[0].x = 0.15;		// this field of view fits a Asus Xtion sensor mounted at 0.63m height (camera center) pointing downwards to the ground in a respective angle
//	fov_points[0].y = 0.35;
//	fov_points[1].x = 0.15;
//	fov_points[1].y = -0.35;
//	fov_points[2].x = 1.15;
//	fov_points[2].y = -0.65;
//	fov_points[3].x = 1.15;
//	fov_points[3].y = 0.65;
//	int planning_mode = 2;	// viewpoint planning
    else if (planning_mode_ == 1)
    {
        fov_points[0].x = -0.15;        // this is the working area of a vacuum cleaner with 60 cm width
        fov_points[0].y = 0.15;
        fov_points[1].x = -0.15;
        fov_points[1].y = -0.15;
        fov_points[2].x = 0.15;
        fov_points[2].y = -0.15;
        fov_points[3].x = 0.15;
        fov_points[3].y = 0.15;
    }


}

// Lifecycle cleanup hook: currently only reports that the plugin is being
// cleaned up; no resources are released here.
void IpaFullCoveragePath::cleanup()
{
    auto logger = node_handle_->get_logger();
    RCLCPP_INFO(logger, "CleaningUp plugin %s of type IpaFullCoveragePath", name_.c_str());
}

// Lifecycle activation hook: no state change is required, so this only logs
// the transition.
void IpaFullCoveragePath::activate()
{
    auto logger = node_handle_->get_logger();
    RCLCPP_INFO(logger, "Activating plugin %s of type IpaFullCoveragePath", name_.c_str());
}

// Lifecycle deactivation hook: no state change is required, so this only
// logs the transition.
void IpaFullCoveragePath::deactivate()
{
    auto logger = node_handle_->get_logger();
    RCLCPP_INFO(logger, "Deactivating plugin %s of type IpaFullCoveragePath", name_.c_str());
}


nav_msgs::msg::Path IpaFullCoveragePath::createPlan(
        const geometry_msgs::msg::PoseStamped &start,
        const geometry_msgs::msg::PoseStamped &goal)
{
    nav_msgs::msg::Path global_path;

    // Checking if the goal and start state is in the global frame
    if (start.header.frame_id != global_frame_)
    {
        RCLCPP_ERROR(
                node_handle_->get_logger(), "Planner will only except start position from %s frame",
                global_frame_.c_str());
        return global_path;
    }

    if (goal.header.frame_id != global_frame_)
    {
        RCLCPP_INFO(
                node_handle_->get_logger(), "Planner will only except goal position from %s frame",
                global_frame_.c_str());
        return global_path;
    }

    global_path.poses.clear();
    global_path.header.stamp = node_handle_->now();
    global_path.header.frame_id = global_frame_;

    /********************** Get grid from server **********************/
    std::vector<std::vector<bool> > grid;
    auto grid_req_srv = std::make_shared<nav2_msgs::srv::GetMapNav2::Request>();
    std::cout << "Requesting grid!!\n\n";
    if (!cpp_grid_client_->wait_for_service(std::chrono::seconds(5)))
    {
        std::cout << "Could not retrieve grid from map_server\n" << std::endl;
        return global_path;
    }
    std::cout << "Requesting grid  success!!\n\n";

    auto result_future = cpp_grid_client_->async_send_request(grid_req_srv);
    auto srv_goal = result_future.get();
    std::cout << "*****Room Exploration action server*****\n" << std::endl;
    // ***************** I. read the given parameters out of the goal *****************
    // todo: this is only correct if the map is not rotated
    const cv::Point2d map_origin(srv_goal->map_origin.position.x, srv_goal->map_origin.position.y);
    const float map_resolution = srv_goal->map_resolution;    // in [m/cell]
    const float map_resolution_inverse = 1. / map_resolution;
    std::cout << "map origin: " << map_origin << " m       map resolution: " << map_resolution << " m/cell"
              << std::endl;
    const int robot_radius_in_pixel = (robotRadius / map_resolution);
    std::cout << "robot radius: " << robotRadius << " m   (" << robot_radius_in_pixel << " px)" << std::endl;
    const cv::Point starting_position((start.pose.position.x - map_origin.x) / map_resolution,
                                      (start.pose.position.y - map_origin.y) / map_resolution);
    std::cout << "starting point: (" << start.pose.position.x << ", " << start.pose.position.y << ") m   ("
              << starting_position << " px)" << std::endl;
    if (planning_mode_ == PLAN_FOR_FOOTPRINT)
        std::cout << "planning mode: planning coverage path with robot's footprint" << std::endl;
    else if (planning_mode_ == PLAN_FOR_FOV)
        std::cout << "planning mode: planning coverage path with robot's field of view" << std::endl;
    // todo: receive map data in nav_msgs::msg::OccupancyGrid format
    cv::Mat room_map;

    if (srv_goal->data_source == srv_goal->IMAGE_PATH)
    {
        std::cout << "data_source: IMAGE_PATH" << std::endl;
        room_map = fromPathGetMat(srv_goal->map_path);
    }
    else if (srv_goal->data_source == srv_goal->IMAGE_DATA)
    {
        // converting the map msg in cv format
        std::cout << "data_source: IMAGE_DATA" << std::endl;
        cv_bridge::CvImagePtr cv_ptr_obj;
        cv_ptr_obj = cv_bridge::toCvCopy(srv_goal->input_map, sensor_msgs::image_encodings::MONO8);
        room_map = cv_ptr_obj->image;
    }
    else if (srv_goal->data_source == srv_goal->MAP_DATA)
    {
        std::cout << "data_source: MAP_DATA" << std::endl;
        mapToMat(srv_goal->map, room_map);
    }
    else
    {
        return global_path;
    }

    // determine room size
    int area_px = 0;        // room area in pixels
    for (int v = 0; v < room_map.rows; ++v)
        for (int u = 0; u < room_map.cols; ++u)
            if (room_map.at<uchar>(v, u) >= 250)
                area_px++;
    std::cout << "area_px= " << area_px << " map_resolution= " << map_resolution
              << " rows= " << room_map.rows << " cols= " << room_map.cols << std::endl;
    std::cout << "### room area = " << area_px * map_resolution * map_resolution << " m^2" << std::endl;

    // closing operation to neglect inaccessible areas and map errors/artifacts
    cv::Mat temp;
    cv::erode(room_map, temp, cv::Mat(), cv::Point(-1, -1), map_correction_closing_neighborhood_size_);
    cv::dilate(temp, room_map, cv::Mat(), cv::Point(-1, -1), map_correction_closing_neighborhood_size_);

    // remove unconnected, i.e. inaccessible, parts of the room (i.e. obstructed by furniture), only keep the room with the largest area

    const bool room_not_empty = removeUnconnectedRoomParts(room_map);
    if (room_not_empty == false)
    {
        std::cout
                << "RoomExplorationServer::exploreRoom: Warning: the requested room is too small for generating exploration trajectories."
                << std::endl;
        return global_path;
    }

    // get the grid size, to check the areas that should be revisited later
    double grid_spacing_in_meter = 0.0;        // is the square grid cell side length that fits into the circle with the robot's coverage radius or fov coverage radius
    float fitting_circle_radius_in_meter = 0;
    Eigen::Matrix<float, 2, 1> fitting_circle_center_point_in_meter;    // this is also considered the center of the field of view, because around this point the maximum radius incircle can be found that is still inside the fov
    std::vector<Eigen::Matrix<float, 2, 1> > fov_corners_meter(4);
    const double fov_resolution = 1000;        // in [cell/meter]
    if (planning_mode_ == PLAN_FOR_FOV) // read out the given fov-vectors, if needed
    {
        // Get the size of one grid cell s.t. the grid can be completely covered by the field of view (fov) from all rotations around it.
        for (int i = 0; i < 4; ++i)
            fov_corners_meter[i] << fov_points[i].x, fov_points[i].y;
        computeFOVCenterAndRadius(fov_corners_meter, fitting_circle_radius_in_meter,
                                  fitting_circle_center_point_in_meter, fov_resolution);
        // get the edge length of the grid square that fits into the fitting_circle_radius
        grid_spacing_in_meter = fitting_circle_radius_in_meter * std::sqrt(2);
    } else // if planning should be done for the footprint, read out the given coverage radius
    {
        grid_spacing_in_meter = coverage_radius * std::sqrt(2);
    }
    // map the grid size to an int in pixel coordinates, using floor method
    const double grid_spacing_in_pixel = grid_spacing_in_meter /
                                         map_resolution;        // is the square grid cell side length that fits into the circle with the robot's coverage radius or fov coverage radius, multiply with sqrt(2) to receive the whole working width
    std::cout << "grid size: " << grid_spacing_in_meter << " m   (" << grid_spacing_in_pixel << " px)" << std::endl;
    // set the cell_size_ for #4 convexSPP explorator or #5 flowNetwork explorator if it is not provided
    if (cell_size_ <= 0)
        cell_size_ = std::floor(grid_spacing_in_pixel);


    // ***************** II. plan the path using the wanted planner *****************
    // todo: consider option to provide the inflated map or the robot radius to the functions instead of inflating with half cell size there
    Eigen::Matrix<float, 2, 1> zero_vector;
    zero_vector << 0, 0;
    std::vector<int> insert_id_;
    std::vector<geometry_msgs::msg::Pose2D> exploration_path;
    if (room_exploration_algorithm_ == 1) // use grid point explorator
    {
        // plan path
        if (planning_mode_ == PLAN_FOR_FOV)
            grid_point_planner.getExplorationPath(room_map, exploration_path, map_resolution, starting_position,
                                                  map_origin, std::floor(grid_spacing_in_pixel), false,
                                                  fitting_circle_center_point_in_meter, tsp_solver_,
                                                  tsp_solver_timeout_);
        else
            grid_point_planner.getExplorationPath(room_map, exploration_path, map_resolution, starting_position,
                                                  map_origin, std::floor(grid_spacing_in_pixel), true, zero_vector,
                                                  tsp_solver_, tsp_solver_timeout_);
    }
    else if (room_exploration_algorithm_ == 2) // use boustrophedon explorator
    {
        // plan path
        if (planning_mode_ == PLAN_FOR_FOV)
            boustrophedon_explorer_.getExplorationPath(room_map, exploration_path, insert_id_, map_resolution,
                                                       starting_position, map_origin, grid_spacing_in_pixel,
                                                       grid_obstacle_offset_, path_eps_, cell_visiting_order_,
                                                       false, fitting_circle_center_point_in_meter, min_cell_area_,
                                                       max_deviation_from_track_);
        else
            boustrophedon_explorer_.getExplorationPath(room_map, exploration_path, insert_id_, map_resolution,
                                                       starting_position, map_origin, grid_spacing_in_pixel,
                                                       grid_obstacle_offset_, path_eps_, cell_visiting_order_, true,
                                                       zero_vector, min_cell_area_, max_deviation_from_track_);
    }
    else if (room_exploration_algorithm_ == 3) // use neural network explorator
    {
        neural_network_explorator_.setParameters(A_, B_, D_, E_, mu_, step_size_, delta_theta_weight_);
        // plan path
        if (planning_mode_ == PLAN_FOR_FOV)
            neural_network_explorator_.getExplorationPath(room_map, exploration_path, map_resolution,
                                                          starting_position, map_origin, grid_spacing_in_pixel,
                                                          false, fitting_circle_center_point_in_meter, false);
        else
            neural_network_explorator_.getExplorationPath(room_map, exploration_path, map_resolution,
                                                          starting_position, map_origin, grid_spacing_in_pixel,
                                                          true, zero_vector, false);
    }
    else if (room_exploration_algorithm_ == 4) // use convexSPP explorator
    {
        // plan coverage path
        if (planning_mode_ == PLAN_FOR_FOV)
            convex_SPP_explorator_.getExplorationPath(room_map, exploration_path, map_resolution, starting_position,
                                                      map_origin, cell_size_, delta_theta_, fov_corners_meter,
                                                      fitting_circle_center_point_in_meter, 0., 7, false);
        else
            convex_SPP_explorator_.getExplorationPath(room_map, exploration_path, map_resolution, starting_position,
                                                      map_origin, cell_size_, delta_theta_, fov_corners_meter,
                                                      zero_vector, coverage_radius, 7, true);
    }
    else if (room_exploration_algorithm_ == 5) // use flow network explorator
    {
        if (planning_mode_ == PLAN_FOR_FOV)
            flow_network_explorator_.getExplorationPath(room_map, exploration_path, map_resolution,
                                                        starting_position, map_origin, cell_size_,
                                                        fitting_circle_center_point_in_meter, grid_spacing_in_pixel,
                                                        false, path_eps_, curvature_factor_, max_distance_factor_);
        else
            flow_network_explorator_.getExplorationPath(room_map, exploration_path, map_resolution,
                                                        starting_position, map_origin, cell_size_, zero_vector,
                                                        grid_spacing_in_pixel, true, path_eps_, curvature_factor_,
                                                        max_distance_factor_);
    }
    else if (room_exploration_algorithm_ == 6) // use energy functional explorator
    {
        if (planning_mode_ == PLAN_FOR_FOV)
            energy_functional_explorator_.getExplorationPath(room_map, exploration_path, map_resolution,
                                                             starting_position, map_origin, grid_spacing_in_pixel,
                                                             false, fitting_circle_center_point_in_meter);
        else
            energy_functional_explorator_.getExplorationPath(room_map, exploration_path, map_resolution,
                                                             starting_position, map_origin, grid_spacing_in_pixel,
                                                             true, zero_vector);
    }
    else if (room_exploration_algorithm_ == 7) // use voronoi explorator
    {
        // create a usable occupancyGrid map out of the given room map
        nav_msgs::msg::OccupancyGrid room_gridmap;
        matToMap(room_gridmap, room_map);

        // do not find nearest pose to starting-position and start there because of issue in planner when starting position is provided
        if (planning_mode_ == PLAN_FOR_FOV)
        {
//			cv::Mat distance_transform;
//			cv::distanceTransform(room_map, distance_transform, cv::DIST_L2, CV_DIST_MASK_PRECISE);
//			cv::Mat display = room_map.clone();
//			// todoo: get max dist from map and parametrize loop
//			for (int s=5; s<100; s+=10)
//			{
//				for (int v=0; v<distance_transform.rows; ++v)
//				{
//					for (int u=0; u<distance_transform.cols; ++u)
//					{
//						if (int(distance_transform.at<float>(v,u)) == s)
//						{
//							display.at<uchar>(v,u) = 0;
//						}
//					}
//				}
//			}
//			cv::imshow("distance_transform", distance_transform);
//			cv::imshow("trajectories", display);
//			cv::waitKey();

            // convert fov-radius to pixel integer
            const int grid_spacing_as_int = (int) std::floor(grid_spacing_in_pixel);
            std::cout << "grid spacing in pixel: " << grid_spacing_as_int << std::endl;

            // create the object that plans the path, based on the room-map
            VoronoiMap vm(room_gridmap.data.data(), room_gridmap.info.width, room_gridmap.info.height,
                          grid_spacing_as_int, 2,
                          true); // a perfect alignment of the paths cannot be assumed here (in contrast to footprint planning) because the well-aligned fov trajectory is mapped to robot locations that may not be on parallel tracks
            // get the exploration path
            std::vector<geometry_msgs::msg::Pose2D> fov_path_uncleaned;
            vm.setSingleRoom(true); //to force to consider all rooms
            vm.generatePath(fov_path_uncleaned, cv::Mat(), starting_position.x,
                            starting_position.y);    // start position in room center

            // clean path from subsequent double occurrences of the same pose
            std::vector<geometry_msgs::msg::Pose2D> fov_path;
            downsampleTrajectory(fov_path_uncleaned, fov_path, 2. * 2.); //5*5);

            // convert to poses with angles
            RoomRotator room_rotation;
            room_rotation.transformPointPathToPosePath(fov_path);

            // map fov-path to robot-path
            //cv::Point start_pos(fov_path.begin()->x, fov_path.begin()->y);
            //mapPath(room_map, exploration_path, fov_path, fitting_circle_center_point_in_meter, map_resolution, map_origin, start_pos);
            printf("Starting to map from field of view pose to robot pose");
            cv::Point robot_starting_position = (fov_path.size() > 0 ? cv::Point(fov_path[0].x, fov_path[0].y)
                                                                     : starting_position);
            cv::Mat inflated_room_map;
            cv::erode(room_map, inflated_room_map, cv::Mat(), cv::Point(-1, -1),
                      (int) std::floor(robotRadius / map_resolution));
            mapPath(inflated_room_map, exploration_path, fov_path, fitting_circle_center_point_in_meter,
                    map_resolution, map_origin, robot_starting_position);
        } else
        {
            // convert coverage-radius to pixel integer
            //int coverage_diameter = (int)std::floor(2.*coverage_radius/map_resolution);
            //std::cout << "coverage radius in pixel: " << coverage_diameter << std::endl;
            const int grid_spacing_as_int = (int) std::floor(grid_spacing_in_pixel);
            std::cout << "grid spacing in pixel: " << grid_spacing_as_int << std::endl;

            // create the object that plans the path, based on the room-map
            VoronoiMap vm(room_gridmap.data.data(), room_gridmap.info.width, room_gridmap.info.height,
                          grid_spacing_as_int, 2,
                          true);    //coverage_diameter-1); // diameter in pixel (full working width can be used here because tracks are planned in parallel motion)
            // get the exploration path
            std::vector<geometry_msgs::msg::Pose2D> exploration_path_uncleaned;
            vm.setSingleRoom(true); //to force to consider all rooms
            vm.generatePath(exploration_path_uncleaned, cv::Mat(), starting_position.x,
                            starting_position.y);    // start position in room center

            // clean path from subsequent double occurrences of the same pose
            downsampleTrajectory(exploration_path_uncleaned, exploration_path, 3.5 * 3.5); //3.5*3.5);

            // convert to poses with angles
            RoomRotator room_rotation;
            room_rotation.transformPointPathToPosePath(exploration_path);

            // transform to global coordinates
            for (size_t pos = 0; pos < exploration_path.size(); ++pos)
            {
                exploration_path[pos].x = (exploration_path[pos].x * map_resolution) + map_origin.x;
                exploration_path[pos].y = (exploration_path[pos].y * map_resolution) + map_origin.y;
            }
        }
    }
    else if (room_exploration_algorithm_ == 8) // use boustrophedon variant explorator
    {
        // plan path
        if (planning_mode_ == PLAN_FOR_FOV)
            boustrophedon_variant_explorer_.getExplorationPath(room_map, exploration_path, insert_id_, map_resolution,
                                                               starting_position, map_origin, grid_spacing_in_pixel,
                                                               grid_obstacle_offset_, path_eps_,
                                                               cell_visiting_order_, false,
                                                               fitting_circle_center_point_in_meter, min_cell_area_,
                                                               max_deviation_from_track_);
        else
            boustrophedon_variant_explorer_.getExplorationPath(room_map, exploration_path, insert_id_, map_resolution,
                                                               starting_position, map_origin, grid_spacing_in_pixel,
                                                               grid_obstacle_offset_, path_eps_,
                                                               cell_visiting_order_, true, zero_vector,
                                                               min_cell_area_, max_deviation_from_track_);
    }
    std::vector<geometry_msgs::msg::PoseStamped> exploration_path_pose_stamped;
    geometry_msgs::msg::PoseStamped PoseStamped_tmp, last_PoseStamped_tmp;
    Eigen::Quaterniond quaternion;
    for (size_t i = 0; i < exploration_path.size(); ++i)
    {
        PoseStamped_tmp.header = global_path.header;
        PoseStamped_tmp.pose.position.x = exploration_path[i].x;
        PoseStamped_tmp.pose.position.y = exploration_path[i].y;
        PoseStamped_tmp.pose.position.z = 0.;
        quaternion = Eigen::AngleAxisd((double) exploration_path[i].theta, Eigen::Vector3d::UnitZ());
        PoseStamped_tmp.pose.orientation.set__x(quaternion.x());
        PoseStamped_tmp.pose.orientation.set__y(quaternion.y());
        PoseStamped_tmp.pose.orientation.set__z(quaternion.z());
        PoseStamped_tmp.pose.orientation.set__w(quaternion.w());
        if (i > 0 && fabs(exploration_path[i].theta - exploration_path[i - 1].theta) > delta_theta_two_yaw)
        {
            quaternion = Eigen::AngleAxisd((double) exploration_path[i].theta, Eigen::Vector3d::UnitZ());
            last_PoseStamped_tmp.pose.orientation.set__x(quaternion.x());
            last_PoseStamped_tmp.pose.orientation.set__y(quaternion.y());
            last_PoseStamped_tmp.pose.orientation.set__z(quaternion.z());
            last_PoseStamped_tmp.pose.orientation.set__w(quaternion.w());
            exploration_path_pose_stamped.push_back(last_PoseStamped_tmp);
        }
        exploration_path_pose_stamped.push_back(PoseStamped_tmp);
        last_PoseStamped_tmp = PoseStamped_tmp;
    }
    global_path.poses = exploration_path_pose_stamped;
    return global_path;
}

// Function that provides the functionality that a given fov path gets mapped to a robot path by using the given parameters.
// To do so simply a vector operation is applied. If the computed robot pose is not in the free space, another accessible
// point is generated by finding it on the radius around the fov middlepoint s.t. the distance to the last robot position
// is minimized.
// Important: the room map needs to be an unsigned char single channel image, if inaccessible areas should be excluded, provide the inflated map
// robot_to_fov_vector in [m]
/// @brief Maps a field-of-view (fov) coverage path to a corresponding robot path.
/// @param room_map single-channel unsigned char map, 255 = free; pass the inflated map to exclude inaccessible areas
/// @param robot_path [out] resulting robot poses in world coordinates [m], appended in fov_path order
/// @param fov_path fov center poses in pixel coordinates of room_map
/// @param robot_to_fov_vector vector from robot center to fov center in [m] (robot frame)
/// @param map_resolution map resolution [m/pixel]
/// @param map_origin world coordinates of map pixel (0,0) [m]
/// @param starting_point initial robot position in pixel coordinates
void IpaFullCoveragePath::mapPath(const cv::Mat &room_map, std::vector<geometry_msgs::msg::Pose2D> &robot_path,
                                  const std::vector<geometry_msgs::msg::Pose2D> &fov_path,
                                  const Eigen::Matrix<float, 2, 1> &robot_to_fov_vector,
                                  const double map_resolution, const cv::Point2d map_origin,
                                  const cv::Point &starting_point)
{
    // initialize helper classes
    MapAccessibilityAnalysis map_accessibility;
    AStarPlanner path_planner;
    const double map_resolution_inv = 1.0 / map_resolution;

    // initialize the robot position in accessible space to enable the Astar planner to find a path from the beginning
    cv::Point robot_pos(starting_point.x, starting_point.y);
//	std::vector<MapAccessibilityAnalysis::Pose> accessible_start_poses_on_perimeter;
//	map_accessibility.checkPerimeter(accessible_start_poses_on_perimeter, fov_center, fov_radius_pixel, PI/64., room_map, false, robot_pos);

    // map the given robot to fov vector into pixel coordinates
    Eigen::Matrix<float, 2, 1> robot_to_fov_vector_pixel;
    robot_to_fov_vector_pixel << robot_to_fov_vector(0, 0) * map_resolution_inv, robot_to_fov_vector(1, 0) *
                                                                                 map_resolution_inv;
    // distance between robot center and fov center, in pixels
    const double fov_radius_pixel = robot_to_fov_vector_pixel.norm();
    // angular offset between the robot's forward direction and the direction towards the fov center
    // (non-zero when the fov is mounted off-center)
    const double fov_to_front_offset_angle = atan2((double) robot_to_fov_vector(1, 0),
                                                   (double) robot_to_fov_vector(0, 0));
    std::cout << "mapPath: fov_to_front_offset_angle: " << fov_to_front_offset_angle << "rad ("
              << fov_to_front_offset_angle * 180. / M_PI << "deg)" << std::endl;
    std::cout << "fov_radius_pixel: " << fov_radius_pixel << "      robot_to_fov_vector: "
              << robot_to_fov_vector(0, 0) << ", " << robot_to_fov_vector(1, 0) << std::endl;

    // go trough the given poses and calculate accessible robot poses
    // strategy (in code order): 1. map_accessibility_analysis on the fov perimeter,
    // 2. directly computed pose shift by the rotated robot-to-fov vector, 3. A* path towards the fov center
    int found_with_astar = 0, found_with_map_acc = 0, found_with_shift = 0, not_found = 0;
    for (std::vector<geometry_msgs::msg::Pose2D>::const_iterator pose = fov_path.begin();
         pose != fov_path.end(); ++pose)
    {
        bool found_pose = false;

        // 1. try with map_accessibility_analysis
        // compute accessible locations on perimeter around target fov center
        MapAccessibilityAnalysis::Pose fov_center(pose->x, pose->y, pose->theta);
        std::vector<MapAccessibilityAnalysis::Pose> accessible_poses_on_perimeter;
        map_accessibility.checkPerimeter(accessible_poses_on_perimeter, fov_center, fov_radius_pixel, PI / 64.,
                                         room_map, false, robot_pos);

        //std::cout << "  fov_center: " << fov_center.x << ", " << fov_center.y << ", " << fov_center.orientation << "           accessible_poses_on_perimeter.size: " << accessible_poses_on_perimeter.size() << std::endl;

        if (accessible_poses_on_perimeter.size() != 0)
        {
            // todo: also consider complete visibility of the fov_center (or whole cell) as a selection criterion
            // todo: extend with a complete consideration of the exact robot footprint
            // go trough the found accessible positions and take the one that minimizes the angle between approach vector and robot heading direction at the target position
            // and which lies in the half circle around fov_center which is "behind" the fov_center pose's orientation
//			double max_cos_alpha = -10;
            std::map<double, MapAccessibilityAnalysis::Pose, std::greater<double> > cos_alpha_to_perimeter_pose_mapping;        // maps (positive) cos_alpha to their perimeter poses
            MapAccessibilityAnalysis::Pose best_pose;
            //std::cout << "Perimeter: \n robot_pos = " << robot_pos.x << ", " << robot_pos.y << "     fov_center = " << fov_center.x << ", " << fov_center.y << "\n";
            for (std::vector<MapAccessibilityAnalysis::Pose>::iterator perimeter_pose = accessible_poses_on_perimeter.begin();
                 perimeter_pose != accessible_poses_on_perimeter.end(); ++perimeter_pose)
            {
                // exclude positions that are ahead of the moving direction
                //cv::Point2d heading = cv::Point2d(fov_center.x, fov_center.y) - cv::Point2d(perimeter_pose->x, perimeter_pose->y);
                //const double heading_norm = sqrt((double)heading.x*heading.x+heading.y*heading.y);
                perimeter_pose->orientation -= fov_to_front_offset_angle; // robot heading correction of off-center fov
                // cos_alpha is the cosine of the angle between the robot heading at the perimeter pose and
                // the desired fov viewing direction; both headings are unit vectors, so the norms are 1
                const cv::Point2d perimeter_heading = cv::Point2d(cos(perimeter_pose->orientation),
                                                                  sin(perimeter_pose->orientation));
                const double perimeter_heading_norm = 1.;
                const cv::Point2d fov_center_heading = cv::Point2d(cos(fov_center.orientation),
                                                                   sin(fov_center.orientation));
                const double fov_center_heading_norm = 1.;
                const double cos_alpha =
                        (fov_center_heading.x * perimeter_heading.x + fov_center_heading.y * perimeter_heading.y) /
                        (fov_center_heading_norm * perimeter_heading_norm);
                //std::cout << "  cos_alpha: " << cos_alpha << std::endl;
//				if (cos_alpha < 0)
//					continue;
                // only keep candidates whose heading deviates less than 90deg from the fov viewing direction;
                // std::greater orders the map so the best-aligned pose comes first
                if (cos_alpha >= 0.)
                    cos_alpha_to_perimeter_pose_mapping[cos_alpha] = *perimeter_pose;        // rank by cos(angle) between approach direction and viewing direction

                // rank by cos(angle) between approach direction and viewing direction
                //cv::Point2d approach = cv::Point2d(perimeter_pose->x, perimeter_pose->y) - cv::Point2d(robot_pos.x, robot_pos.y);
                //const double approach_norm = sqrt(approach.x*approach.x+approach.y*approach.y);
//				double cos_alpha = 1.;		// only remains 1.0 if robot_pose and perimeter_pose are identical
//				if (fov_center_heading.x!=0 || fov_center_heading.y!=0)	// compute the cos(angle) between approach direction and viewing direction
//					cos_alpha = (fov_center_heading.x*perimeter_heading.x + fov_center_heading.y*perimeter_heading.y)/(fov_center_heading_norm*perimeter_heading_norm);
                //std::cout << " - perimeter_pose = " << perimeter_pose->x << ", " << perimeter_pose->y << "     cos_alpha = " << cos_alpha << "   max_cos_alpha = " << max_cos_alpha << std::endl;
//				if(cos_alpha>max_cos_alpha)
//				{
//					max_cos_alpha = cos_alpha;
//					best_pose = *perimeter_pose;
//					found_pose = true;
//				}
            }
//			std::cout << "  cos_alpha_to_perimeter_pose_mapping.size: " << cos_alpha_to_perimeter_pose_mapping.size() << std::endl;
            if (cos_alpha_to_perimeter_pose_mapping.size() > 0)
            {
                // rank by cos(angle) between approach direction and viewing direction
                double max_cos_alpha = cos_alpha_to_perimeter_pose_mapping.begin()->first;
                double closest_dist = std::numeric_limits<double>::max();
                for (std::map<double, MapAccessibilityAnalysis::Pose, std::greater<double> >::iterator it = cos_alpha_to_perimeter_pose_mapping.begin();
                     it != cos_alpha_to_perimeter_pose_mapping.end(); ++it)
                {
//					std::cout << "    cos_alpha: " << it->first << std::endl;
                    // only consider the best fitting angles
                    // (within 5% of the best alignment; the map is sorted descending, so we can stop here)
                    if (it->first < 0.95 * max_cos_alpha)
                        break;
                    // from those select the position with shortest approach path from current position
                    const double dist = cv::norm(robot_pos - cv::Point(it->second.x, it->second.y));
                    if (dist < closest_dist)
                    {
                        closest_dist = dist;
                        best_pose = it->second;
                        found_pose = true;
                    }
                }
//				std::cout << "    closest_dist: " << closest_dist << "    best_pose: " << best_pose.x << ", " << best_pose.y << ", " << best_pose.orientation << std::endl;
            }

            // add pose to path and set robot position to it
            if (found_pose == true)
            {
                // convert from pixel to world coordinates [m] before storing
                geometry_msgs::msg::Pose2D best_pose_msg;
                best_pose_msg.x = best_pose.x * map_resolution + map_origin.x;
                best_pose_msg.y = best_pose.y * map_resolution + map_origin.y;
                best_pose_msg.theta = best_pose.orientation;
                robot_path.push_back(best_pose_msg);
                robot_pos = cv::Point(cvRound(best_pose.x), cvRound(best_pose.y));
                //std::cout << " best_pose = " << best_pose.x << ", " << best_pose.y << "      max_cos_alpha = " << max_cos_alpha << std::endl;
                ++found_with_map_acc;
            }
        }

        // 2. if no accessible pose was found, try with a directly computed pose shift
        if (found_pose == false)
        {
            // get the rotation matrix
            const float sin_theta = std::sin(pose->theta);
            const float cos_theta = std::cos(pose->theta);
            Eigen::Matrix<float, 2, 2> R;
            R << cos_theta, -sin_theta, sin_theta, cos_theta;

            // calculate the resulting rotated relative vector and the corresponding robot position
            // (robot = fov center minus the robot-to-fov vector rotated into the pose's orientation)
            Eigen::Matrix<float, 2, 1> v_rel_rot = R * robot_to_fov_vector_pixel;
            Eigen::Matrix<float, 2, 1> robot_position;
            robot_position << pose->x - v_rel_rot(0, 0), pose->y - v_rel_rot(1, 0);

            // check the accessibility of the found point (inside map bounds and on a free (255) pixel)
            geometry_msgs::msg::Pose2D current_pose;
            if (robot_position(0, 0) >= 0 && robot_position(1, 0) >= 0 && robot_position(0, 0) < room_map.cols &&
                robot_position(1, 0) < room_map.rows &&
                room_map.at<uchar>((int) robot_position(1, 0), (int) robot_position(0, 0)) ==
                255) // position accessible
            {
                current_pose.x = (robot_position(0, 0) * map_resolution) + map_origin.x;
                current_pose.y = (robot_position(1, 0) * map_resolution) + map_origin.y;
                current_pose.theta = pose->theta;
                found_pose = true;
                robot_path.push_back(current_pose);

                // set robot position to computed pose s.t. further planning is possible
                robot_pos = cv::Point((int) robot_position(0, 0), (int) robot_position(1, 0));

                ++found_with_shift;
            }
        }

        if (found_pose == false)
        {
            // 3. if still no accessible position was found, try with computing the A* path from robot position to fov_center and stop at the right distance
            // get vector from current position to desired fov position
            cv::Point fov_position(pose->x, pose->y);
            std::vector<cv::Point> astar_path;
            path_planner.planPath(room_map, robot_pos, fov_position, 1.0, 0.0, map_resolution, 0, &astar_path);

            // find the point on the astar path that is on the viewing circle around the fov middlepoint
            cv::Point accessible_position;
            for (std::vector<cv::Point>::iterator point = astar_path.begin(); point != astar_path.end(); ++point)
            {
                if (cv::norm(*point - fov_position) <= fov_radius_pixel)
                {
                    accessible_position = *point;
                    found_pose = true;
                    break;
                }
            }

            // add pose to path and set robot position to it
            if (found_pose == true)
            {
                // get the angle s.t. the pose points to the fov middlepoint and save it
                geometry_msgs::msg::Pose2D current_pose;
                current_pose.x = (accessible_position.x * map_resolution) + map_origin.x;
                current_pose.y = (accessible_position.y * map_resolution) + map_origin.y;
                current_pose.theta = std::atan2(pose->y - accessible_position.y, pose->x - accessible_position.x) -
                                     fov_to_front_offset_angle; // todo: check -fov_to_front_offset_angle
                robot_path.push_back(current_pose);
                // set robot position to computed pose s.t. further planning is possible
                robot_pos = accessible_position;
                ++found_with_astar;
            }
        }

        // no strategy succeeded for this fov pose: it is skipped (robot_path gets no entry for it)
        if (found_pose == false)
        {
            ++not_found;
            std::cout << "  not found." << std::endl;
        }

//		testing
//		std::cout << robot_pos << ", " << cv::Point(pose->x, pose->y) << std::endl;
//		cv::Mat room_copy = room_map.clone();
//		cv::line(room_copy, robot_pos, cv::Point(pose->x, pose->y), cv::Scalar(127), 1);
//		cv::circle(room_copy, robot_pos, 2, cv::Scalar(100), cv::FILLED);
//		cv::imshow("pose", room_copy);
//		cv::waitKey();

//		if (robot_path.size()>0)
//			std::cout << "  robot_pos: " << robot_path.back().x << ", " << robot_path.back().y << ", " << robot_path.back().theta << std::endl;
    }
    std::cout << "Found with map_accessibility: " << found_with_map_acc << ",   with shift: " << found_with_shift
              << ",   with A*: " << found_with_astar << ",   not found: " << not_found << std::endl;
}

// remove unconnected, i.e. inaccessible, parts of the room (i.e. obstructed by furniture), only keep the room with the largest area
// Removes unconnected, i.e. inaccessible, parts of the room (e.g. obstructed by furniture)
// and only keeps the connected component with the largest area.
// Returns false if the map contains no free pixels at all.
bool IpaFullCoveragePath::removeUnconnectedRoomParts(cv::Mat &room_map)
{
    // Step 1: build an int32 working copy where free pixels (255) carry the
    // sentinel -100 ("not yet labeled") and everything else carries 0.
    cv::Mat labeled_map(room_map.rows, room_map.cols, CV_32SC1);
    for (int row = 0; row < room_map.rows; ++row)
        for (int col = 0; col < room_map.cols; ++col)
            labeled_map.at<int32_t>(row, col) = (room_map.at<uchar>(row, col) == 255) ? -100 : 0;

    // Step 2: flood-fill every still-unlabeled free segment with an increasing
    // label (1,2,3,...) and record its pixel count. The ordered map keeps the
    // segment with the largest area at its back.
    std::map<int, int> area_to_label_map;    // key: segment area in pixels, value: segment label
    int next_label = 1;
    for (int row = 0; row < labeled_map.rows; ++row)
    {
        for (int col = 0; col < labeled_map.cols; ++col)
        {
            if (labeled_map.at<int32_t>(row, col) != -100)
                continue;
            const int segment_area = cv::floodFill(labeled_map, cv::Point(col, row), cv::Scalar(next_label), 0, 0, 0,
                                                   8 | cv::FLOODFILL_FIXED_RANGE);
            area_to_label_map[segment_area] = next_label;
            ++next_label;
        }
    }

    // abort if there was no free segment at all
    if (area_to_label_map.empty())
        return false;

    // Step 3: clear every pixel that does not belong to the largest segment.
    const int label_of_biggest_room = area_to_label_map.rbegin()->second;
    std::cout << "label_of_biggest_room=" << label_of_biggest_room << std::endl;
    for (int row = 0; row < room_map.rows; ++row)
        for (int col = 0; col < room_map.cols; ++col)
            if (labeled_map.at<int32_t>(row, col) != label_of_biggest_room)
                room_map.at<uchar>(row, col) = 0;

    return true;
}

/// @brief Cleans a trajectory from subsequent (near-)duplicate poses.
/// Keeps the first pose, then only appends a pose when its squared pixel distance to the last
/// kept pose exceeds min_dist_squared; the final pose of the input is always appended.
/// @param path_uncleaned input trajectory (may be empty)
/// @param path [out] downsampled trajectory, poses are appended to it
/// @param min_dist_squared minimum squared distance between two consecutive kept poses
void IpaFullCoveragePath::downsampleTrajectory(const std::vector<geometry_msgs::msg::Pose2D> &path_uncleaned,
                                               std::vector<geometry_msgs::msg::Pose2D> &path,
                                               const double min_dist_squared)
{
    // guard against empty input: the previous implementation dereferenced
    // path_uncleaned[0] unconditionally, which is undefined behavior on an empty vector
    if (path_uncleaned.empty())
        return;

    // clean path from subsequent double occurrences of the same pose
    path.push_back(path_uncleaned[0]);
    cv::Point last_added_point(path_uncleaned[0].x, path_uncleaned[0].y);
    for (size_t i = 1; i < path_uncleaned.size(); ++i)
    {
        const cv::Point current_point(path_uncleaned[i].x, path_uncleaned[i].y);
        cv::Point vector = current_point - last_added_point;
        // keep the pose if it moved far enough, and always keep the trajectory's end pose
        if (vector.x * vector.x + vector.y * vector.y > min_dist_squared || i == path_uncleaned.size() - 1)
        {
            path.push_back(path_uncleaned[i]);
            last_added_point = current_point;
        }
    }
}

/// @brief Loads a map image from disk and converts it to a binary free-space map.
/// The image is vertically flipped (image origin vs. map origin), then binarized:
/// pixels < 250 become 0 (not reachable), all others become 255 (free).
/// @param image_path path to the map image file
/// @return binarized mono8 map; an empty cv::Mat if the image could not be read
cv::Mat IpaFullCoveragePath::fromPathGetMat(std::string image_path)
{
    // load as single-channel grayscale
    cv::Mat map_flipped = cv::imread(image_path, 0);
    if (map_flipped.empty())
    {
        // imread failed (missing file / unsupported format): previously this crashed in cv::flip
        std::cout << "fromPathGetMat: could not read image from '" << image_path << "'" << std::endl;
        return cv::Mat();
    }
    cv::Mat map;
    cv::flip(map_flipped, map, 0);
    // binarize in one call: src > 249 -> 255 (free), otherwise 0 (not reachable);
    // identical to the former per-pixel loop (value < 250 -> 0, else 255)
    cv::threshold(map, map, 249, 255, cv::THRESH_BINARY);
    std::cout << "map-size: " << map.rows << "x" << map.cols << std::endl;
    // note: the previous implementation converted the mat to a sensor_msgs::Image and back
    // via cv_bridge only to obtain a copy of the very same mono8 image; returning directly is equivalent
    return map;
}

// function to create an occupancyGrid map out of a given cv::Mat
/// @brief Creates an occupancy grid map out of a given cv::Mat.
/// Non-zero (free, typically 255) pixels become 0 (free) in the grid, zero pixels become 100 (occupied).
/// @param map [out] occupancy grid; width/height/data are set, other fields (resolution, origin, header) are left untouched
/// @param mat single-channel 8-bit input map (255 = free, 0 = occupied)
void IpaFullCoveragePath::matToMap(nav_msgs::msg::OccupancyGrid &map, const cv::Mat &mat)
{
    map.info.width = mat.cols;
    map.info.height = mat.rows;
    map.data.resize(mat.cols * mat.rows);

    // read pixels as uchar: the previous int8_t access type-mismatched the CV_8U data
    // (255 read as -1) and only worked because the value was merely tested for non-zero;
    // iterate row-major (y outer) so the mat memory is traversed sequentially
    for (int y = 0; y < mat.rows; y++)
        for (int x = 0; x < mat.cols; x++)
            map.data[y * mat.cols + x] = mat.at<uchar>(y, x) ? 0 : 100;
}

// function to create a cv::Mat out of a given occupancyGrid map
/// @brief Creates a cv::Mat out of a given occupancy grid map.
/// Occupancy values [0,100] are mapped against the (presumably member-configured) free/occupied
/// threshold ratios: <= free threshold -> 255 (free), >= occupied threshold -> 0 (occupied),
/// in-between -> 205 (unknown gray); out-of-range values (e.g. -1 unknown) -> 0.
/// @param map input occupancy grid (data values -1 or [0,100])
/// @param mat [out] resulting single-channel 8-bit map
void IpaFullCoveragePath::mapToMat(const nav_msgs::msg::OccupancyGrid &map, cv::Mat &mat)
{
    mat = cv::Mat(map.info.height, map.info.width, CV_8U);
    // free_thresh / occupied_thresh are ratios in [0,1] — assumed class members, TODO confirm;
    // occupancy grid cells are percentages in [0,100], so scale the thresholds accordingly
    const int free_thresh_int = std::rint(free_thresh * 100.0);
    const int occupied_thresh_int = std::rint(occupied_thresh * 100.0);
    // write pixels as uchar: the previous int8_t storage narrowed 255/205 to negative values
    // and was only correct via the identical bit pattern; iterate row-major for sequential access
    for (int y = 0; y < mat.rows; y++)
        for (int x = 0; x < mat.cols; x++)
        {
            const int map_cell = map.data[y * mat.cols + x];
            uchar value;
            if (map_cell < 0 || 100 < map_cell)
            {
                value = 0; //205
            } else if (map_cell <= free_thresh_int)
            {
                value = 255;    // free space
            } else if (occupied_thresh_int <= map_cell)
            {
                value = 0;      // occupied
            } else
            {
                value = 205;    // in-between -> unknown gray
            }
            mat.at<uchar>(y, x) = value;
        }
}


}  // namespace ipa_coverage_planning_plugin

#include "pluginlib/class_list_macros.hpp"

// Register this planner with pluginlib so the Nav2 planner server can load it
// at runtime as a nav2_core::GlobalPlanner plugin.
PLUGINLIB_EXPORT_CLASS(ipa_coverage_planning_plugin::IpaFullCoveragePath, nav2_core::GlobalPlanner)
