#include "OriginalRewardGenerator.h"
#include <math.h>
#include "MREAgent.h"
#include <rlglue/utils/C/RLStruct_util.h>

bool is_terminal(const observation_t* oo); 
void ali_init();
double get_reward(const observation_t* oo); 
// Builds the reward generator for the given agent: sizes the reusable
// RL-Glue structures from the agent's task specification and runs the
// external one-time initializer.
OriginalRewardGenerator::OriginalRewardGenerator(MREAgent* p)
:RFGeneralizer(p)
{
	// Preallocate the scratch observation and action structs once, sized
	// exactly to the dimensions declared in the task spec, so the
	// predict/isTerminal paths can reuse them without reallocating.
	allocateRLStruct(&observation, p->taskSpec.num_int_observations, p->taskSpec.num_double_observations, p->taskSpec.charcount_observations);
	allocateRLStruct(&action, p->taskSpec.num_int_actions, p->taskSpec.num_double_actions, p->taskSpec.charcount_actions);

	// Environment-specific setup, defined outside this file.
	ali_init();
}

// Returns true when `state` is a terminal observation.
//
// The decision is delegated to the environment-specific is_terminal()
// (declared at the top of this file, defined elsewhere). The member
// `observation` struct acts as an adapter so the raw double array can
// be handed to the RL-Glue style predicate.
//
// NOTE(review): assigning observation.doubleArray aliases it to the
// caller's buffer, overwriting the array allocated in the constructor
// (leaked on first call) — TODO confirm this is intentional.
bool OriginalRewardGenerator::isTerminal(Observation state)
{
	observation.doubleArray = state;
	return is_terminal(&observation);
	// Dead code removed: a Manhattan-distance goal check (and the macros
	// GOAL_X/GOAL_Y/GOAL_THRESHOLD/COLLISION_REWARD feeding it) sat below
	// the unconditional return above and could never execute.
}

// Predicted immediate reward for the transition (start, a) -> end.
//
// @param start  state before the transition (currently unused)
// @param a      action taken (currently unused)
// @param end    resulting state; the only input consulted
// @return       reward reported by the environment-specific get_reward()
//               (declared at the top of this file, defined elsewhere)
//
// `start` and `a` are retained for interface compatibility with
// RFGeneralizer even though only the end state determines the reward.
double OriginalRewardGenerator::predict(Observation start, Action a, Observation end)
{
	// We're using this only for the robot: wrap the end state in the
	// preallocated observation_t and delegate to the external reward
	// function.
	observation.doubleArray = end;
	return get_reward(&observation);
	// Dead code removed: everything that previously followed the
	// unconditional return above was unreachable — a collision/goal
	// reward scheme (with its GOAL_X/GOAL_Y/GOAL_THRESHOLD/
	// COLLISION_REWARD/BALL_RADIUS macros) plus a large commented-out
	// per-environment reward table (puddleworld, bumbleball, acrobot,
	// mountain car, cart-pole) and a trailing `return 0;`.
}

