/**
 * 
 * Copyright (C) 2011 Cody Stoutenburg . All rights reserved.
 *
 *       This program is free software; you can redistribute it and/or
 *       modify it under the terms of the GNU Lesser General Public License
 *       as published by the Free Software Foundation; either version 2.1
 *       of the License, or (at your option) any later version.
 *
 *       This program is distributed in the hope that it will be useful,
 *       but WITHOUT ANY WARRANTY; without even the implied warranty of
 *       MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *       GNU Lesser General Public License for more details.
 *
 *       You should have received a copy of the GNU Lesser General Public License
 *       along with this program; if not, write to the Free Software
 *       Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA. 
 * 
 */
package ca.usherbrooke.behaviour;

import java.util.ArrayList;

import ca.usherbrooke.agent.SimpleAutonomousAgent;
import ca.usherbrooke.agent.action.Action;
import ca.usherbrooke.goal.BasicGoal;
import ca.usherbrooke.thread.Task;
import ca.usherbrooke.thread.TaskGenerateAction;
import ca.usherbrooke.thread.WorkerPool;
import ca.usherbrooke.view.slick.jade.worldView.shared.EntityViewDebug;

/**
 * @author Cody Stoutenburg
 * 
 */
/**
 * Autonomous behaviour that picks the agent's highest-scored goal and tries to
 * solve it one action at a time. Action generation is delegated to the
 * {@link WorkerPool} via {@link TaskGenerateAction}, so the behaviour only
 * polls for a finished task and applies the resulting {@link Action}.
 * Successfully completed plans are memorized under the goal's name so they can
 * be replayed the next time the same goal is selected.
 *
 * @author Cody Stoutenburg
 */
public class MyAutonomousBehaviour extends AutonomousBehaviour {
	private static final long serialVersionUID = 1L;

	// Actions executed for the current goal; memorized when the goal is solved.
	private final ArrayList<Action> currentPlan;

	// Action currently being executed, or null when none is in progress.
	private Action currentAction;

	// Pending asynchronous action-generation task, or null when none is queued.
	private Task<Action> task;

	/**
	 * Creates the behaviour with an empty plan and no action in progress.
	 *
	 * @param a the agent this behaviour drives
	 */
	public MyAutonomousBehaviour(SimpleAutonomousAgent a) {
		super(a);

		currentPlan = new ArrayList<Action>();
		currentAction = null;
	}

	/**
	 * Selects the goal with the highest total score among all of the agent's
	 * goals.
	 *
	 * @return the guid of the best-scored goal, or {@code null} when the agent
	 *         has no goals
	 */
	@Override
	protected Long selectGoal() {
		Long goalSelected = null;
		// BUG FIX: this was Float.MIN_VALUE, which is the smallest POSITIVE
		// float — goals with a score <= 0 could never be selected. Negative
		// infinity makes the first goal always beat the initial value.
		float max = Float.NEGATIVE_INFINITY;
		for (BasicGoal goal : this.getAgent().getGoals().getAllGoals()) {
			if (max < goal.getScoreTotal()) {
				goalSelected = goal.getGuid();
				max = goal.getScoreTotal();
			}
		}

		return goalSelected;
	}

	/**
	 * Memorizes a copy of the successful plan under the solved goal's name so
	 * it can be replayed the next time this goal is selected.
	 */
	@Override
	protected void onGoalSolve() {
		System.out.println("-------------------");
		System.out.println("GOAL IS SOLVE");
		System.out.println("-------------------");
		if (!currentPlan.isEmpty()) {
			// Copy-construct instead of clone(): same snapshot semantics,
			// without the unchecked cast and @SuppressWarnings.
			this.getAgent().addMemory(getAgent().getSelectedGoal().getName(),
					new ArrayList<Action>(currentPlan));
		}
	}

	/**
	 * Called when the goal fails; the failed plan is deliberately not
	 * memorized, so there is nothing to do beyond logging.
	 */
	@Override
	protected void onGoalFail() {
		// we do nothing if the goal is fail
		System.out.println("-------------------");
		System.out.println("GOAL IS FAIL");
		System.out.println("-------------------");
	}

	/**
	 * Resets all per-goal state (debug marker, current action, plan, movement)
	 * when the goal terminates for any reason.
	 */
	@Override
	protected void onGoalTerminated() {
		System.out.println("-------------------");
		System.out.println("GOAL IS TERMINATED");
		System.out.println("-------------------");
		// Clear the debug target marker for this agent's entity.
		EntityViewDebug.INSTANCE.setTargetPosition(this.getAgent().getEntity()
				.getGuid(), null);

		currentAction = null;
		currentPlan.clear();
		this.getAgent().clearCurrentPlan();
		this.getAgent().stopMove();
	}

	/**
	 * One step of the solve loop: retire a finished action, queue an
	 * asynchronous generation task when no action is pending, and apply the
	 * generated action once the task completes.
	 */
	@Override
	protected void tryToSolveGoal() {
		if (currentAction != null && currentAction.actionIsFinish()) {
			// if the currentAction is finish put it at null
			currentAction = null;
		}

		if (task == null && currentAction == null) {
			// if no action and no task to generate action
			// => generate a task to find the new action
			task = new TaskGenerateAction(this.getAgent());
			WorkerPool.INSTANCE.addTask(task);
		}

		if (task != null && task.isFinish() && currentAction == null) {
			// the task is finish and we have a result so we set the new action
			currentAction = task.getResult();
			task = null;

			if (currentAction != null) {
				System.out.println("-------------------");
				System.out.println("TAKE AN ACTION : "
						+ currentAction.getClass().getSimpleName());
				System.out.println("-------------------");

				currentAction.applyAction();

				if (currentAction.canMemorize()) {
					currentPlan.add(currentAction);
				}
			} else {
				// BUG FIX: the generator may legitimately produce no action;
				// previously this dereferenced a null currentAction and threw
				// a NullPointerException, killing the behaviour step.
				System.out.println("-------------------");
				System.out.println("ACTION IS NULL");
				System.out.println("-------------------");
			}
		}
	}
}
