#include "Localizer.h"

/**
 * Constructs the localizer: wires up ROS I/O, fetches the static map,
 * precomputes the obstacle-cost map for the sensor model, and initializes
 * map->odom to identity until an initial pose estimate arrives.
 */
Localizer::Localizer(ros::NodeHandle &n) : config("config/general.cfg"), ms(n), sm(n), last(0, 0, 0), paviz(n){
	// subscriptions and publications
	subo = n.subscribe("/odom", 1000, &Localizer::readOdom, this);
	subi = n.subscribe("botpose", 10, &Localizer::botPoseCallback, this);
	cost_marker_pub = n.advertise<visualization_msgs::Marker>("visualization_marker", 10);

	// first, get the map from the map-server
	// (must happen before the obstacle-cost computation below, which consumes it)
	map = ms.getMap();

	/* Initializing the sensor model with the obstacle cost calculated using LNP with a fast marching wavefront*/
	obstaclecost.calculate_obs_cost(map);
	sm.setObstacleCostMap(obstaclecost.cost);

	/**
	 * Initially in the absence of any data, the map->odom is an identity transform
	 * and the poses are distributed uniformly all over the map with all possible poses
	 **/
	map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Point(0, 0, 0));
	ROS_INFO("Waiting for initial pose estimate");
	// No particle filtering happens until botPoseCallback flips this flag.
	initialEstimateProvided = false;
}

Localizer::~Localizer() {
	// Nothing to release explicitly: all members clean up via their own destructors.
}

/**
 * Odometry callback: runs a localization step once the robot has moved or
 * turned past the configured thresholds since the last processed reading.
 */
void Localizer::readOdom(nav_msgs::Odometry msg){
	// NOTE(review): leftover diagnostic — this fires at ERROR level on every
	// /odom message; confirm which callback should be subscribed and remove.
	ROS_ERROR("Wrong readOdom being called");
	OdomReading current(msg);

	// First reading ever (reference pose still at its (0,0,0) sentinel):
	// adopt the current reading. The comparison below then sees no motion
	// and returns early, so no localization step runs on the first message.
	if(last.x == 0 && last.y == 0 && last.theta == 0) last = current;

	// Skip the expensive particle-filter update for insignificant motion.
	bool changed = compareCurrentOdomReadingToNew(last, current);
	if (!changed)
		return;

	this->localize(last, current);
	last = current;
}

/**
 * Decides whether the robot has moved significantly between two odometry
 * readings. Motion counts as significant when either the Euclidean distance
 * or the absolute rotation exceeds its configured threshold (the angle
 * threshold is stored in degrees in the config and converted to radians here).
 */
bool Localizer::compareCurrentOdomReadingToNew(OdomReading &last,OdomReading &current){
	const double moved  = last.calculateDistanceTo(current);
	const double turned = last.calculateRotation(current);

	const double angleLimit =
			atof(config.getConfiguration("config.localizer.odometry.angle_threshold").c_str()) * PI / 180.0f;
	const double distanceLimit =
			atof(config.getConfiguration("config.localizer.odometry.distance_threshold").c_str());

	return std::abs(turned) > angleLimit || moved > distanceLimit;
}

/**
 * Initial-pose callback (e.g. from RViz "2D Pose Estimate").
 * Seeds the particle set around the given pose, weights the particles with
 * the sensor model, updates the map->odom transform, and publishes the
 * resulting pose array for visualization.
 */
void Localizer::botPoseCallback(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr &msg){
	/**
	 * Generate a set of nPoses (given in config file) poses around the initial pose
	 * estimate distributed normally in space and query the sensor model for probablity of each pose
	 ***/
	initialEstimateProvided = true;
	// pa (particle poses) and w (weights) are filled in-place by these calls.
	PoseCreator.getInitialRobotPoses(msg->pose,pa,map,config);
    sm.estimateWeights(pa, config, w);

	makeMapOdomtransform(msg);
	paviz.publishPoseArray(pa);
}

/**
 * Recomputes the map->odom transform from an absolute pose estimate in the
 * map frame. Composes T(map->base) from the message with T(base->odom) from
 * tf to obtain T(map->odom). On tf failure the previous transform is kept.
 */
void Localizer::makeMapOdomtransform(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr &msg){
	tf::Pose pose;
	tf::poseMsgToTF(msg->pose.pose, pose);

	tf::StampedTransform baseInMap;
	try{
		// Wait for the transform at the stamp we actually look up below.
		// (Previously this waited for ros::Time::now() but looked up
		// msg->header.stamp, which could still be unavailable.)
		listener.waitForTransform("/base_link", "/odom", msg->header.stamp, ros::Duration(3.0));
		listener.lookupTransform("/base_link", "/odom", msg->header.stamp, baseInMap);
	}catch(const tf::TransformException &ex){
		// Catch by const reference and surface the reason for the failure.
		ROS_WARN("Failed to lookup transform: %s", ex.what());
		return;
	}
	// T(map->odom) = T(map->base) * T(base->odom)
	map_to_odom = pose * baseInMap;
}

/**
 * Returns true when rotation dominates translation between two odometry
 * readings (|angle| / distance > 1, with a small epsilon guarding against
 * division by zero). Prints a "B " marker to stdout when triggered.
 */
bool Localizer::tooMuchRotation(OdomReading &last,OdomReading &current){
	double distance_moved = last.calculateDistanceTo(current);
	double angle_turned = last.calculateRotation(current);

	// std::abs, not abs: the bare C abs() takes int and would silently
	// truncate the double ratio (L53 already uses std::abs for consistency).
	if(std::abs(angle_turned / (distance_moved+0.0001)) > 1.0f ){
		std::cout << "B ";
		return true;
	}
	return false;
}

/**
 * Runs one localization step for the motion between two odometry readings.
 * With the sensor model enabled, performs a particle-filter resampling step
 * (KLD-style adaptive sample set, implemented in pf.reSample) and updates
 * the map->odom transform. With it disabled, only propagates the particles
 * through the motion model and leaves map->odom untouched, since the
 * particles just keep diverging without sensor corrections.
 */
void Localizer::localize(OdomReading &last, OdomReading &current){
	ROS_INFO("localize");
	// Called for its diagnostic side effect (prints "B " when rotation
	// dominates); the result is unused because the rotation-specific
	// resampling branch below is currently disabled.
	(void) tooMuchRotation(last,current);
	om.setOdomMove(last, current);
	int sensorModelOn = atoi(config.getConfiguration("sensorModelOn").c_str());
	if( sensorModelOn ){
//		if(rotateMajor)
//			pf.reSample(pa, w, mm, sm, map, config, om, map_to_odom);
//		else
		map_to_odom = pf.reSample(pa, w, mm, sm, map, config, om, map_to_odom);
	}else{
		/** Run only the Motion model
		 * We don't update the map->odom transform in this case,
		 * as the particle just keep diverging
		 * **/
		PoseArray newPoses;
		mm.stepRobot(pa, om, config, newPoses,map);
		//mm.stepHuman(pa,newPoses,map);
		pa = newPoses;
	}
	paviz.publishPoseArray(pa);
}

/**
 * This function is called in a separate thread and continually
 * publishes a map to odom transform, that is stored in the map_to_odom
 * member variable. When that variable is updated by the localizer,
 * the change in the transform is reflected automatically.
 * Runs at 10 Hz; exits when ROS shuts down or the thread is interrupted.
 */
void Localizer::publishMapOdomTransform(){
	ros::Rate rate(10);
	// ros::ok() (rather than `while (true)`) lets the loop stop cleanly on
	// node shutdown; the interruption point handles explicit thread cancel.
	while (ros::ok()) {
		boost::this_thread::interruption_point();
		// Stamp slightly in the future so consumers can interpolate until
		// the next publication.
		ros::Time t = ros::Time::now() + transform_tolerance_;
		br.sendTransform(tf::StampedTransform(map_to_odom, t, "/map", "/odom"));
		rate.sleep();
	}
}
