
#ifndef		AGENTPROXY_HEADER
#define		AGENTPROXY_HEADER

#include "RLcommon.h"
#include "C_agent_parser.h"
#include "Util.h"

#include "TFGenerator.h"
#include "RFGeneralizer.h"

#include "types.h"


// Tracks cumulative learning statistics at three granularities:
// overall run, current trial, and current episode.
class Statistics
{
public:
	Statistics()
	{
		// Initialize ALL members (the original left startingTime
		// uninitialized, which is undefined behavior when read).
		age = trialAge = startingTime = 0;
		episodeAge = trialEpisode = trial = 0;
		overalReward = trialReward = episodeReward = 0;
		hasSeenShortEpisode = 0;
	}

	// Persist the run-level summary: trial index, total steps, total reward.
	void saveToFile(FILE* fout)
	{
		fprintf(fout,"%d %ld %lf\n", trial, age, overalReward); 
	}

	// Restore a previously saved summary; trial/episode counters restart at 0.
	void loadFromFile(FILE* fin)
	{
		trialAge = episodeAge = 0; 
		trialReward = 0; 
		// Verify all three fields parsed; on a short or malformed record,
		// fall back to safe defaults instead of leaving stale/garbage values.
		if (fscanf(fin,"%d %ld %lf\n", &trial, &age, &overalReward) != 3)
		{
			trial = 0;
			age = 0;
			overalReward = 0;
		}
	}


	long age;					//total number of steps 
	int trial;					//which trial are we in
	long startingTime;			//time when app. started
	long trialAge;				//total number of steps in current trial
	int episodeAge;				//number of steps in the current episode
	int trialEpisode;			//number of episodes in this trial

	double overalReward;	//cumulative reward so far
	double trialReward;		//cumulative reward in the current trial
	double episodeReward;	//cumulative reward in the current episode

	int hasSeenShortEpisode;  //have we seen a good episode? (episode ended in a goal state)

};

class MDPSolver; 

// RL agent facade: exposes the init/start/step/endTrial/cleanup episode
// lifecycle and delegates to transition/reward generalizers plus an MDP
// solver.  NOTE(review): method bodies are defined elsewhere; per-method
// comments below are inferred from names/signatures — confirm against the
// implementation file.
class AgentProxy
{
public:
	AgentProxy(void);
	~AgentProxy(void);

// Configure the agent from the task specification (presumably parsed into taskSpec).
void init(Task_specification ts);
// Begin an episode with the first observation; returns the first action.
Action start(Observation o);
// One interaction step: receive reward for the last action, observe the new
// state, return the next action.
Action step(Reward r, Observation o);
// Finish the current trial, folding in the terminal reward.
void endTrial(Reward r);
// Release per-run resources.
void cleanup();
// Returns the agent's name (see the `name` member below).
char* getName();


// Load a previously recorded transition history from file.
void loadHistory(const char* fname);
/*
void saveHistory(const char* fname);
*/
// Load agent parameters/configuration from file.
void loadParam(const char* fname); 

// Translate an action index via the mappedAction table — TODO confirm.
Action mapAction(Action a);
// Clamp/wrap an observation into the valid range — presumably per taskSpec; verify.
void bringObservation2bound(Observation o); 
// Allocate/initialize tfGens and rfGens.
void initGeneralizers(); 
// Update the transition/reward generalizers with one observed transition.
void learnGeneralization( Transition* t); 

// Observation related methods
static void printObservation(Observation o); 
// Formats an observation into a string; ownership of the returned buffer is
// unclear from here — check the implementation for who frees it.
char* sprintObservation(Observation o); 
static void copyObservation(Observation o, Observation&); 
// Overload returning a fresh copy; caller presumably owns the result — verify.
static Observation copyObservation(Observation o); 

	// Pick a uniformly random action — TODO confirm distribution.
	Action randomAction();
	// Append one transition to the in-memory history list.
	void addToHistory(Transition& t); 
	// Run the MDP solver (planning) from the given state.
	void solveModel(Observation currentState); 
	// Greedy action according to the solved model — confirm against solver.
	Action getBestAction(Observation state); 

//void loadHistory(const char* fname); 

public:
	task_spec_struct taskSpec;
	Action lastAction;			//the action that was taken last step
	Observation lastState; 

    char* name; 
	static ENVIRONMENT environment; 
	Statistics stats;

	list<Transition> history; 	//recorded transitions (see addToHistory/loadHistory)

// Model-learning components; pointer ownership (who news/deletes these) is
// not visible in this header — check constructor/destructor.
TFGenerator * tfGens; 
RFGeneralizer* rfGens; 
MDPSolver* mdpSolver; 

	int mappedAction [25]; 		// action index translation table (capacity 25; see mapAction)
	int numberOfActions; 
	static int dimension; 
};

#endif
