//controllers.limitdepthfirst.Agent.java
package controllers.limitdepthfirst;

import java.util.ArrayList;

import core.game.Observation;
import core.game.StateObservation;
import core.player.AbstractPlayer;
import ontology.Types;
import tools.ElapsedCpuTimer;
import tools.Vector2d;

/**
 * Depth-limited depth-first-search controller for the GVGAI framework.
 *
 * <p>Each call to {@link #act} runs a DFS of fixed depth {@code limit} over
 * copied/advanced game states. A winning branch is taken immediately; otherwise
 * the first action of the frontier state with the lowest heuristic cost seen so
 * far is returned. Per-search bookkeeping ({@code curaction}, {@code explored},
 * {@code bestAction}, {@code minCost}) is cleared at the end of every act().
 */
public class Agent extends AbstractPlayer {
    /** Observation grid captured from the initial state (not used by the search itself). */
    protected ArrayList<Observation> grid[][];
    /** Pixel size of one grid block. */
    protected int block_size;
    /** Action path of the DFS branch currently being explored (acts as a stack). */
    protected ArrayList<Types.ACTIONS> curaction = new ArrayList<>();
    /** States on the current DFS path, used to prune revisited positions (cycle avoidance). */
    protected ArrayList<StateObservation> explored = new ArrayList<>();
    /** NOTE(review): never read or written by the visible code — kept for interface compatibility. */
    protected ArrayList<StateObservation> globalExplored = new ArrayList<>();
    /** Candidate first actions; index 0 always holds the most recently recorded best. */
    protected ArrayList<Types.ACTIONS> bestAction = new ArrayList<>();
    /** Maximum search depth per act() call. */
    protected int limit = 4;
    /** Upper bound used to (re)initialize {@code minCost} before each search.
     *  Previously the field was initialized with 100000 but reset to 10000 in
     *  act(); the constant makes both sites agree. */
    protected static final double INITIAL_COST = 100000;
    /** Lowest heuristic cost seen at the search frontier so far. */
    protected double minCost = INITIAL_COST;

    /**
     * Captures the observation grid and block size from the initial state.
     *
     * @param so           initial state observation
     * @param elapsedTimer constructor time budget (unused here)
     */
    public Agent(StateObservation so, ElapsedCpuTimer elapsedTimer)
    {
        grid = so.getObservationGrid();
        block_size = so.getBlockSize();
    }

    /**
     * Manhattan-distance heuristic: while the avatar has not yet picked up the
     * key (avatar type != 4) the estimate is avatar→key plus key→goal;
     * afterwards it is avatar→goal.
     *
     * <p>NOTE(review): assumes {@code fixedPositions[1]} holds the goal and
     * {@code movingPositions[0]} holds the key (Zelda/Sokoban-like layouts) and
     * that both lists are non-empty — confirm before reusing for other games.
     *
     * @param stateObs state to evaluate
     * @return estimated remaining cost (lower is better)
     */
    public double heuristic(StateObservation stateObs)
    {
        ArrayList<Observation>[] fixedPositions = stateObs.getImmovablePositions();
        ArrayList<Observation>[] movingPositions = stateObs.getMovablePositions();
        Vector2d goalpos = fixedPositions[1].get(0).position;
        Vector2d npcpos = stateObs.getAvatarPosition();
        if (stateObs.getAvatarType() != 4)
        {
            Vector2d keypos = movingPositions[0].get(0).position;
            return Math.abs(npcpos.x-keypos.x)+Math.abs(npcpos.y-keypos.y)+Math.abs(keypos.x-goalpos.x)+Math.abs(keypos.y-goalpos.y);
        }
        else return Math.abs(npcpos.x-goalpos.x)+Math.abs(npcpos.y-goalpos.y);
    }

    /**
     * Depth-limited DFS from {@code stateObs}.
     *
     * <p>Returns 0 as soon as a winning branch is found (its first action is
     * recorded at the head of {@code bestAction}); returns -1 otherwise. Side
     * effects: frontier states update {@code minCost}/{@code bestAction}, and
     * the last element of {@code curaction} is popped before returning
     * (backtracking).
     *
     * @param stateObs state to expand
     * @param limit    remaining search depth; 0 means evaluate and stop
     * @return 0 if a win was found down this branch, -1 otherwise
     */
    public int limitdepthfirst(StateObservation stateObs, int limit) {
        if (stateObs.isGameOver()) {
            if (stateObs.getGameWinner() == Types.WINNER.PLAYER_LOSES)
            {
                if (curaction.size() > 0) curaction.remove(curaction.size()-1);
                return -1;
            }
            else
            {
                minCost = 0;
                // Guard: at the search root curaction may be empty (game won
                // before any action was simulated); get(0) would have thrown.
                if (curaction.size() > 0)
                {
                    bestAction.add(0, curaction.get(0));
                    curaction.remove(curaction.size()-1);
                }
                return 0;
            }
        }
        else {
            // Prune states whose avatar position was already seen on this path.
            for (int index = 0; index < explored.size(); index++) {
                if (stateObs.equalPosition(explored.get(index)))
                {
                    if (curaction.size() > 0) curaction.remove(curaction.size()-1);
                    return -1;
                }
            }
            if (limit == 0)
            {
                // Frontier reached: keep the first action of the cheapest branch.
                double rate = heuristic(stateObs);
                if (rate <= minCost)
                {
                    minCost = rate;
                    if (curaction.size() > 0) bestAction.add(0, curaction.get(0));
                }
                // BUG FIX: the original tested "size() >= 0", which is true for
                // an empty list and made remove(-1) throw IndexOutOfBoundsException.
                if (curaction.size() > 0) curaction.remove(curaction.size()-1);
                return -1;
            }
            explored.add(stateObs);
            Types.ACTIONS action = null;
            ArrayList<Types.ACTIONS> actions = stateObs.getAvailableActions();
            for (int index = 0; index < actions.size(); index++) {
                action = actions.get(index);
                StateObservation stCopy = stateObs.copy();
                stCopy.advance(action);
                curaction.add(action);
                int flag = limitdepthfirst(stCopy, limit-1);
                if (flag == 0) {
                    return 0;   // winning branch found: propagate success upward
                }
            }
            // Backtrack: pop this node's action and its entry in explored.
            if (curaction.size() > 0) curaction.remove(curaction.size()-1);
            explored.remove(explored.size()-1);
            return -1;
        }
    }

    /**
     * Runs one depth-limited search and returns the chosen action, resetting
     * all per-search state afterwards.
     *
     * @param stateObs     current game state
     * @param elapsedTimer per-move time budget (unused here)
     * @return best first action found, or ACTION_NIL when the search recorded
     *         no candidate (e.g. the game is already over at the root)
     */
    public Types.ACTIONS act(StateObservation stateObs, ElapsedCpuTimer elapsedTimer)
    {
        limitdepthfirst(stateObs, limit-1);
        // Guard: bestAction can be empty; get(0) would have thrown.
        Types.ACTIONS action = bestAction.isEmpty() ? Types.ACTIONS.ACTION_NIL
                                                    : bestAction.get(0);
        curaction.clear();
        minCost = INITIAL_COST;   // was reset to 10000, inconsistent with the 100000 init
        bestAction.clear();
        explored.clear();
        return action;
    }
}
