package made.ql;

import java.util.ArrayList;
import game.core.Game;
import game.controllers.PacManController;
import made.util.Config;
import made.util.GameStateAnalyzer;
import made.util.Helpers;
import made.util.Logger;
import made.util.GameStateAnalyzer.GameEntity;
import made.util.GameStateAnalyzer.GhostEntity;

/**
 * A Ms. Pac-Man controller driven by tabular Q-learning.
 *
 * <p>The controller abstracts each game tick into one of four coarse states
 * (combinations of "edible time low/high" and "nearest ghost close/far", see
 * {@code STATE}) and lets a {@link QLearning} table pick one of the high-level
 * actions: move toward the nearest ghost, away from it, or toward the nearest
 * pill. In {@code MODE.TRAIN} the Q table is updated on every move with a
 * reward derived from the previous move's outcome; otherwise the best known
 * action is replayed greedily.
 *
 * <p>NOTE(review): implementing {@code Config} to inherit constants is the
 * constant-interface anti-pattern; it is kept unchanged because the rest of
 * the project depends on this type hierarchy.
 */
public class QLPacMan implements PacManController, Config
{
    protected static final boolean RECORD_STATS = true;

    // Running statistics for the current session.
    protected int _score;
    protected int _time;
    protected int _moves;
    protected int _levels;

    // State-discretization thresholds, consumed in getAction().
    protected int _edibleTimeThreshold;         // edible time below this => "edible time low"
    protected int _fleeNearestGhostThreshold;   // ghost nearer than this while inedible => "close"
    protected int _attackNearestGhostThreshold; // ghost nearer than this while edible => "close"

    protected STATE _previousState;
    protected STATE _currentState;
    protected int[] _predicateVariables;
    Game _previousGame;
    protected GameStateAnalyzer _analyzer;
    QLearning _ql;

    // --- PUBLIC CONSTRUCTORS AND METHODS

    /**
     * Creates the controller with default discretization thresholds and a
     * Q table dimensioned by the STATE/ACTION enums.
     */
    public QLPacMan()
    {
        _analyzer = new GameStateAnalyzer();
        // UNDEFINED is the last enum constant, so its ordinal equals the
        // number of "real" constants and doubles as the table dimension.
        _ql = new QLearning(STATE.UNDEFINED.ordinal(), ACTION.UNDEFINED.ordinal());
        _predicateVariables = new int[PREDICATE_VARIABLES.UNDEFINED.ordinal()];
        _edibleTimeThreshold = 18;
        _fleeNearestGhostThreshold = 7;
        _attackNearestGhostThreshold = 80;
    }

    /**
     * Resets per-game statistics, the state bookkeeping and the Q-learner so
     * the controller can start a fresh game (or training run).
     */
    public void init()
    {
        _time = 0;
        _score = 0;
        _moves = 0;
        _levels = 0;
        _previousState = STATE.UNDEFINED;
        _currentState = STATE.UNDEFINED;
        _previousGame = null;
        _ql.init();
    }

    /**
     * Classifies the current game into one of the four coarse states, lets the
     * Q-learner choose a high-level action (updating the table when training),
     * and translates that action into a concrete direction for Ms. Pac-Man.
     *
     * @param currentGame the current game state
     * @param timeDue     decision deadline supplied by the framework (unused)
     * @return the direction Ms. Pac-Man should move in next
     */
    public int getAction(Game currentGame, long timeDue)
    {
        if (_previousGame == null) // First move of the game
            _previousGame = currentGame;

        _analyzer.updateData(_previousGame, currentGame);

        _score = currentGame.getScore();
        int currentPacManIndex = currentGame.getCurPacManLoc();
        GhostEntity nearestGhost = _analyzer.getNearestGhost(currentGame, currentPacManIndex);
        GameEntity nearestPill = _analyzer.getNearestPillInc(currentGame, currentPacManIndex);

        // A negative index flags "no such entity"; only then is it safe to
        // query the game for the ghost's edible time.
        int edibleTime = 0;
        if (nearestGhost.index >= 0)
            edibleTime = currentGame.getEdibleTime(nearestGhost.id);

        // Discretize: ET{L,H} = edible time low/high, NG{C,F} = nearest ghost
        // close/far. Note the "close" radius differs between fleeing (inedible
        // ghosts) and attacking (edible ghosts).
        if (edibleTime < _edibleTimeThreshold)
        {
            _currentState = nearestGhost.pathDistance < _fleeNearestGhostThreshold
                    ? STATE.ETL_NGC
                    : STATE.ETL_NGF;
        }
        else
        {
            _currentState = nearestGhost.pathDistance < _attackNearestGhostThreshold
                    ? STATE.ETH_NGC
                    : STATE.ETH_NGF;
        }

        ACTION nextAction;
        if (MPM_MODE == MODE.TRAIN)
        {
            // Reward the outcome of the last move, fold it into the Q table,
            // then pick the next (possibly exploratory) action.
            int reward = getReward2();
            _ql.updateQvalue(_currentState, reward);

            if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
                _ql.printTable();

            nextAction = _ql.getNextAction(_currentState);
        }
        else
        {
            // Evaluation: replay the best known action greedily.
            nextAction = _ql.getBestAction(_currentState);
        }

        // Translate the abstract action into a concrete direction. Guard each
        // target index: the original code passed a possibly-negative index
        // straight to getNextPacManDir(); fall back to the current direction
        // instead, mirroring the default branch.
        int move;
        switch (nextAction)
        {
        case TO_NEAREST_GHOST:
            move = nearestGhost.index >= 0
                    ? currentGame.getNextPacManDir(nearestGhost.index, true, Game.DM.PATH)
                    : currentGame.getCurPacManDir();
            break;
        case FROM_NEAREST_GHOST:
            move = nearestGhost.index >= 0
                    ? currentGame.getNextPacManDir(nearestGhost.index, false, Game.DM.PATH)
                    : currentGame.getCurPacManDir();
            break;
        case TO_NEAREST_PILL:
            move = nearestPill.index >= 0
                    ? currentGame.getNextPacManDir(nearestPill.index, true, Game.DM.PATH)
                    : currentGame.getCurPacManDir();
            break;
        default:
            move = currentGame.getCurPacManDir();
        }

        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
        {
            Logger.logLine("Next action is " + nextAction.toString());
            Logger.logLine("Next move is " + Helpers.directionToString(move));
            Logger.logLine();
        }

        // Remember this tick so the next call can compute its reward.
        _previousGame = currentGame;
        _previousState = _currentState;
        _moves++;

        return move;
    }

    /** @return the score captured on the most recent getAction() call. */
    public int getScore()
    {
        return _score;
    }

    /**
     * @return {@code true} once the Q-learner reports convergence (average
     *         delta-Q below 1e-6 over the moves made so far)
     */
    public boolean qLearningDone()
    {
        return _ql.hasConverged(_moves, 1e-6f);
    }

    /**
     * Copies externally tuned thresholds into {@code _predicateVariables}.
     *
     * <p>The copy is clamped to the backing array's length: the original loop
     * iterated over {@code thresholds.size()} and could throw
     * {@code ArrayIndexOutOfBoundsException} when the list held more entries
     * than {@code PREDICATE_VARIABLES.UNDEFINED.ordinal()}.
     *
     * @param thresholds tuned predicate values, in PREDICATE_VARIABLES order
     */
    public void setPredicateVariables(ArrayList<Integer> thresholds)
    {
        int count = Math.min(thresholds.size(), _predicateVariables.length);
        for (int i = 0; i < count; i++)
        {
            _predicateVariables[i] = thresholds.get(i);
        }
    }

    /** Delegates to the Q-learner's delta-Q diagnostic printout. */
    public void printDeltaQAverage()
    {
        _ql.printDeltaQAverage();
    }

    /** Delegates to the Q-learner's Q-table printout. */
    public void printTable()
    {
        _ql.printTable();
    }

    /**
     * Event-based reward: sums bonuses/penalties for everything the analyzer
     * observed between the previous and the current game state.
     *
     * @return the accumulated reward (may be negative)
     */
    protected int getReward2()
    {
        int reward = 0;

        if (_analyzer.pacManAtePill())
            reward += 5;

        if (_analyzer.pacManAteGhost())
            reward += 10000;

        if (_analyzer.levelIncreased())
            reward += 50000;

        if (_analyzer.pacManDied())
            reward -= 1000;

        return reward;
    }

    /**
     * Alternative (currently unused) reward: rewards the agent for having
     * moved in the direction the previous state prescribed — away from a
     * close inedible ghost, toward an edible one, toward the nearest pill
     * otherwise — and heavily penalizes any other move.
     *
     * @param currentGame         the current game state
     * @param currentNearestGhost nearest ghost in the current state (unused here)
     * @param currentNearestPill  nearest pill in the current state (unused here)
     * @return 50 when the prescribed direction was followed, -1000 otherwise
     */
    protected int getReward(Game currentGame, GhostEntity currentNearestGhost, GameEntity currentNearestPill)
    {
        int reward = -1000;
        int previousPacManLocation = _previousGame.getCurPacManLoc();
        GhostEntity previousNearestGhost = _analyzer.getNearestGhost(_previousGame, previousPacManLocation);
        GameEntity previousNearestPill = _analyzer.getNearestPillInc(_previousGame, previousPacManLocation);

        if (_previousState == STATE.ETL_NGC)
        {
            // Inedible ghost was close: the right move was to flee it.
            int previousPacManFromGhostDir = _previousGame.getNextPacManDir(previousNearestGhost.index, false, Game.DM.PATH);

            if (currentGame.getCurPacManDir() == previousPacManFromGhostDir)
                reward = 50;

            if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
                Logger.logLine("Got PREY_CLOSE reward");
        } else if (_previousState == STATE.ETL_NGF)
        {
            // Inedible ghost was far: the right move was toward the nearest pill.
            int previousPacManToPillDir = _previousGame.getNextPacManDir(previousNearestPill.index, true, Game.DM.PATH);

            if (currentGame.getCurPacManDir() == previousPacManToPillDir)
                reward = 50;

            if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
                Logger.logLine("Got PREY_FAR reward");
        } else if (_previousState == STATE.ETH_NGC)
        {
            // Edible ghost was close: the right move was to chase it.
            int previousPacManToGhostDir = _previousGame.getNextPacManDir(previousNearestGhost.index, true, Game.DM.PATH);

            if (currentGame.getCurPacManDir() == previousPacManToGhostDir)
                reward = 50;

            if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
                Logger.logLine("Got PREDATOR_CLOSE reward");
        } else if (_previousState == STATE.ETH_NGF)
        {
            // Edible ghost was far: the right move was toward the nearest pill.
            int previousPacManToPillDir = _previousGame.getNextPacManDir(previousNearestPill.index, true, Game.DM.PATH);

            if (currentGame.getCurPacManDir() == previousPacManToPillDir)
                reward = 50;

            if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
                Logger.logLine("Got PREDATOR_FAR reward");
        }

        if (MPM_DEBUG_LEVEL.compareTo(DEBUG_LEVEL.NORMAL) > 0)
            Logger.logLine("Computed fitness: " + reward);

        return reward;
    }
}