package made.ann;

import game.controllers.PacManController;
import game.core.Game;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import made.util.Config;
import made.util.GameStateAnalyzer;
import made.util.GameStateRecorder;
import made.util.Config.ACTION;
import made.util.Config.MODE;
import made.util.GameStateAnalyzer.GameEntity;
import made.util.GameStateAnalyzer.GhostEntity;
import made.util.Helpers;
import made.util.Logger;

/*
 * This is the class you need to modify for your entry. In particular, you need to
 * fill in the getAction() method. Any additional classes you write should either
 * be placed in this package or in sub-packages (e.g., made.ann.mypackage).
 */
/**
 * Ms Pac-Man controller driven by a trained feed-forward neural network.
 *
 * <p>Each tick, three normalized features are extracted from the game state
 * (remaining edible time of the nearest ghost, path distance to the nearest
 * pill, path distance to the nearest ghost), fed to the network, and the
 * highest-activation output neuron is mapped to a high-level ACTION
 * (chase ghost, flee ghost, go to pill), which is then converted into a
 * concrete move direction.
 */
public class ANNPacMan implements PacManController, Config
{
    protected NeuralNetwork net;
    protected GameStateAnalyzer _analyzer;
    // Network topology: {inputs, hidden, outputs}. Inputs and outputs happen
    // to both be 3 here, but they are distinct roles — see getAction().
    protected int[] _neuronsPerLayer = { 3, 3, 3 };

    /**
     * Builds and trains the network ("ga" = genetic-algorithm training,
     * learning rate 0.3, momentum 0.9, 100000 iterations), then prepares the
     * game-state analyzer used to compute the input features.
     */
    public ANNPacMan()
    {
        net = new NeuralNetwork(_neuronsPerLayer, "ga", 0.3f, 0.9f, 100000);
        net.train();
        Logger.logLine("Testing trained neural network\n");
        net.testOnData();
        _analyzer = new GameStateAnalyzer();
    }

    /**
     * Chooses Ms Pac-Man's next move from the network's output.
     *
     * @param currentGame current game state
     * @param timeDue     deadline for returning a move (unused here)
     * @return a direction constant understood by the game engine
     */
    public int getAction(Game currentGame, long timeDue)
    {
        List<Float> pattern = new ArrayList<Float>(_neuronsPerLayer[0]);
        int edibleTime = -1;
        int currentPacManIndex = currentGame.getCurPacManLoc();
        GhostEntity _nearestGhost = _analyzer.getNearestGhost(currentGame, currentPacManIndex);
        GameEntity _nearestPill = _analyzer.getNearestPillInc(currentGame, currentPacManIndex);

        // An index < 0 means no such entity was found this tick.
        if (_nearestGhost.index >= 0)
            edibleTime = currentGame.getEdibleTime(_nearestGhost.id);

        float edibleTimeNorm = _analyzer.getNormalizedEdibleTime(edibleTime);
        float nearestPillDistanceNorm = _analyzer.getNormalizedPathDistance(_nearestPill.pathDistance);
        float nearestGhostDistanceNorm = _analyzer.getNormalizedPathDistance(_nearestGhost.pathDistance);

        pattern.add(edibleTimeNorm);
        pattern.add(nearestPillDistanceNorm);
        pattern.add(nearestGhostDistanceNorm);

        float[] output = net.getOutput(pattern);
        float maxValue = Float.NEGATIVE_INFINITY;
        int maxIndex = -1;

        // Arg-max over the OUTPUT layer. Iterate output.length, not
        // _neuronsPerLayer[0]: the original code used the input-layer size,
        // which only worked because the {3,3,3} topology made them equal.
        for (int i = 0; i < output.length; i++)
        {
            if (output[i] > maxValue)
            {
                maxValue = output[i];
                maxIndex = i;
            }
        }

        ACTION action = Helpers.toActionEnum(maxIndex);

        int move = -1;

        switch (action)
        {
        case TO_NEAREST_GHOST:
            // Guard against "no ghost found" (index < 0), consistent with the
            // edible-time check above; fall back to the current heading.
            move = _nearestGhost.index >= 0
                    ? currentGame.getNextPacManDir(_nearestGhost.index, true, Game.DM.PATH)
                    : currentGame.getCurPacManDir();
            break;
        case FROM_NEAREST_GHOST:
            move = _nearestGhost.index >= 0
                    ? currentGame.getNextPacManDir(_nearestGhost.index, false, Game.DM.PATH)
                    : currentGame.getCurPacManDir();
            break;
        case TO_NEAREST_PILL:
            move = _nearestPill.index >= 0
                    ? currentGame.getNextPacManDir(_nearestPill.index, true, Game.DM.PATH)
                    : currentGame.getCurPacManDir();
            break;
        default:
            move = currentGame.getCurPacManDir();
        }

        return move;
    }
}