package pacman.entries.pacman;

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Random;
import java.util.concurrent.ArrayBlockingQueue;

import com.sun.org.apache.bcel.internal.generic.NEW;

import pacman.controllers.Controller;
import pacman.game.Constants.DM;
import pacman.game.Constants.GHOST;
import pacman.game.Constants.MOVE;
import pacman.game.Game;

/*
 * This is the class you need to modify for your entry. In particular, you need to
 * fill in the getMove() method. Any additional classes you write should either
 * be placed in this package or sub-packages (e.g., game.entries.pacman.mypackage).
 */
public class OldLearningPacMan extends Controller<MOVE>
{
	public OldLearningPacMan() {
		super();
		InitTestWeights();
	}
	
	// Last move decided by ChooseMove(); returned from getMove().
	private MOVE m_Move=MOVE.NEUTRAL;
	
	/**
	 * Controller entry point: scores the behaviors against the current
	 * features and returns the move chosen by the winning behavior.
	 */
	public MOVE getMove(Game game, long timeDue) 
	{
		//Place your game logic here to play the game as Ms Pac-Man
		ChooseMove(game);
		
		return m_Move;
	}

	// There are N features and M behaviors.
	
	final static int N = 2;
	final static int M = 2;
	
	// ______________________________________________ GENETIC LEARNING

	public int GetNumFeatures() { return N; }
	public int GetNumBehaviors() { return M; }
	
	private static final Random random = new Random();
	
	/**
	 * Breeds a new generation of controllers. Each weight of each child is
	 * inherited from a randomly chosen parent (per-weight crossover) and
	 * mutated by uniform noise in [-randomness/2, +randomness/2].
	 *
	 * @param parents    non-empty pool of parent controllers
	 * @param count      number of children to produce
	 * @param randomness total width of the uniform mutation noise
	 * @return the freshly bred generation of {@code count} controllers
	 * @throws IllegalArgumentException if {@code parents} is empty
	 */
	public static ArrayList<OldLearningPacMan> Generation(ArrayList<OldLearningPacMan> parents, 
			int count, float randomness) {
		
		// FIX: fail fast with a clear message instead of an opaque
		// IndexOutOfBoundsException from parents.get(0) below.
		if (parents.isEmpty())
			throw new IllegalArgumentException("parents must not be empty");
		
		ArrayList<OldLearningPacMan> generation = new ArrayList<OldLearningPacMan>(count);
		
		float halfRandomness = randomness/2.0f;
		
		for (int c = 0; c < count; c++) {

			OldLearningPacMan nextPacMan = new OldLearningPacMan();
			
			for (int i = 0; i < N; i++)
				for (int j = 0; j < M; j++) {
					// Pick a fresh random parent for every single weight.
					int p = (parents.size() > 1) ? random.nextInt(parents.size()) : 0;
					nextPacMan.m_Weights[i][j] = parents.get(p).m_Weights[i][j] + random.nextFloat() * randomness - halfRandomness; 
				}
			
			generation.add(nextPacMan);
		}
		
		return generation;
	}
	
	/** @return a defensive N x M copy of this controller's weight matrix. */
	public float[][] GetWeights() {
		float[][] array = new float[N][M];

		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++) // FIX: was j < N — only worked because N == M
				array[i][j] = m_Weights[i][j]; 
				
		return array;
	}
	
	/** Copies the given N x M matrix into this controller's weights. */
	public void SetWeights(float[][] array) {

		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++) // FIX: was j < N — only worked because N == M
				m_Weights[i][j] = array[i][j]; 
	}
	
	// ______________________________________________ REINFORCEMENT LEARNING
	
	final boolean REINFORCEMENT_LEARNING = false;
	
	final float COUNT_SMOOTHING = 0.001f; // keeps rarely-fired weights learning a little
	final float LEARNING_RATE = 0.1f;
	
	long m_NumMoves = 0;                    // moves made since the last weight update
	long[][] m_MoveCounts = new long[N][M]; // times behavior j fired while feature i was active
	
	/** Clears the per-game move statistics. */
	public void ResetMoveCounts() {
		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++)
				m_MoveCounts[i][j] = 0;
		m_NumMoves = 0;
	}
	
	final int SCORE_MEMORY = 50; // sliding window of recent game scores
	ArrayBlockingQueue<Integer> m_ScoreQueue = new ArrayBlockingQueue<Integer>(SCORE_MEMORY);

	/** @return the mean of the remembered scores, or 0 if none are recorded yet. */
	private float AverageScore() {
		
		// FIX: guard against 0/0 == NaN when the queue is still empty.
		if (m_ScoreQueue.isEmpty())
			return 0.0f;
		
		float averageScore = 0.0f;
		
		for (Integer score : m_ScoreQueue)
			averageScore += score;
		averageScore /= m_ScoreQueue.size();
		
		return averageScore;
	}
	
	int m_CurrentScore = 1;
	
	/**
	 * Reinforcement update: nudges every feature-behavior weight in proportion
	 * to how often that pair fired this game — up if the final score beat the
	 * recent average, down if it fell short.
	 *
	 * NOTE Makes the assumption that this will only be called at the end of a game
	 */
	public void UpdateWeights() {
		
		// save current score, evicting the oldest once the window is full
		if (m_ScoreQueue.size() == SCORE_MEMORY)
			m_ScoreQueue.poll();
		m_ScoreQueue.add(m_CurrentScore); // FIX: autoboxing; new Integer(int) is deprecated
		
		if (m_NumMoves != 0) {
			// FIX: AverageScore() is loop-invariant — compute it once, not N*M times.
			float averageScore = AverageScore();
			
			for (int i = 0; i < N; i++)
				for (int j = 0; j < M; j++) {
					float countFactor = ((float)m_MoveCounts[i][j]) / m_NumMoves;
					
					if (m_CurrentScore > averageScore) { // did well
						m_Weights[i][j] += (countFactor + COUNT_SMOOTHING) * LEARNING_RATE;
						m_timesPositive++;
					}
					else if (m_CurrentScore < averageScore) { // did poorly
						m_Weights[i][j] -= (countFactor + COUNT_SMOOTHING) * LEARNING_RATE;
						m_timesNegative++;
					}
				}
		}
		
		ResetMoveCounts();
	}
	
	int m_timesPositive = 0; // diagnostic: weight increases since last print
	int m_timesNegative = 0; // diagnostic: weight decreases since last print
	
	/** Dumps the weight matrix and resets the positive/negative counters. */
	public void PrintWeights() {
		System.out.println("Printing Weights");
		for (int i = 0; i < N; i++) {
			for (int j = 0; j < M; j++)
				System.out.print("\t"+m_Weights[i][j]);
			System.out.println();
		}
		System.out.println("Times positive: " + m_timesPositive);
		m_timesPositive = 0;
		System.out.println("Times negative: " + m_timesNegative);
		m_timesNegative = 0;
	}
	
	// ______________________________________________ DECISION MAKING
	
	float[][] m_Weights = new float[N][M]; // N x M array of weights from features to behaviors
	
	/** Starts every weight at 1.0 so all behaviors initially compete equally. */
	private void InitTestWeights() {
		
		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++)
				m_Weights[i][j] = 1.0f;
	}
	
	/**
	 * Scores every behavior as the weighted sum of the active features and
	 * runs the best-scoring one; falls back to the default behavior when no
	 * behavior scores above zero.
	 */
	@SuppressWarnings("unused")
	private void ChooseMove(Game game) {
		
		// Run the feature checks
		RunFeatures(game);
		
		// Multiply features weights and choose max
		int bestBehaviorIndex = 0;
		float bestWeight = 0.0f;
		
		for (int j = 0; j < M; j++) { // for each behavior
			float weight = 0.0f;
			for (int i = 0; i < N; i++) // tally the feature-weight sums for that behavior
				weight += m_Weights[i][j] * m_FeatureValues[i]; 
			
			if (weight > bestWeight) { // if has a better sum, select it 
				bestWeight = weight;
				bestBehaviorIndex = j;
			}
		}
		
		// Run appropriate behavior to set m_Move (-1 selects the default pill behavior)
		if (bestWeight > 0.0f)
			m_Move = RunBehavior(bestBehaviorIndex, game);
		else
			m_Move = RunBehavior(-1, game);
		
		// if running reinforcement learning, save diagnostics
		if (REINFORCEMENT_LEARNING && bestWeight > 0.0f) {
			m_CurrentScore = game.getScore();
			for (int i = 0; i < N; i++)
				if (m_FeatureValues[i] > 0.0f)
					m_MoveCounts[i][bestBehaviorIndex]++;
			m_NumMoves++;
		}
	}
	
	// ______________________________________________ FEATURES
	
	float[] m_FeatureValues = new float[N]; // 1.0 = feature active this tick, 0.0 = inactive
	
	/** Evaluates all feature detectors for the current game state. */
	private void RunFeatures(Game game) {
		m_FeatureValues[0] = Feature0(game);
		m_FeatureValues[1] = Feature1(game);
	}
	
	// Feature0 checks if any non-edible ghost is closer than MIN_DISTANCE.
	private static final int MIN_DISTANCE=20;	 
	private float Feature0(Game game) {
		int current=game.getPacmanCurrentNodeIndex();
		
		for(GHOST ghost : GHOST.values())
			if(game.getGhostEdibleTime(ghost)==0 && game.getGhostLairTime(ghost)==0)
				if(game.getShortestPathDistance(current,game.getGhostCurrentNodeIndex(ghost))<MIN_DISTANCE)
					return 1.0f;
		return 0.0f;
	}
	
	// Feature1 checks if there are any edible ghosts.
	private float Feature1(Game game) {
		
		for(GHOST ghost : GHOST.values())
			if(game.getGhostEdibleTime(ghost)>0)
				return 1.0f; // early exit — one edible ghost is enough
		
		return 0.0f;
	}
	
	// ______________________________________________ BEHAVIORS
	
	/**
	 * Executes the behavior with the given index and returns its move.
	 * Index 0 flees the nearest ghost, index 1 chases the nearest edible
	 * ghost, any other index (the default) heads for the closest remaining
	 * pill or power pill.
	 */
	private MOVE RunBehavior(int behaviorIndex, Game game) {
		
		int current=game.getPacmanCurrentNodeIndex();
		
		switch (behaviorIndex) {
		case 0:
		{ // Run away from the closest ghost (edibility/lair state is ignored here)
			GHOST ghost = GHOST.BLINKY;
			int distance = game.getShortestPathDistance(current,game.getGhostCurrentNodeIndex(GHOST.BLINKY));
			for(GHOST aGhost : GHOST.values()) {
				int thisDistance = game.getShortestPathDistance(current,game.getGhostCurrentNodeIndex(aGhost));
				if (thisDistance < distance) {
					distance = thisDistance;
					ghost = aGhost;
				}
			}
			return game.getNextMoveAwayFromTarget(current,game.getGhostCurrentNodeIndex(ghost),DM.PATH);
		}
		case 1:
		{ // Eat the closest edible ghost
			int minDistance=Integer.MAX_VALUE;
			GHOST minGhost=null;		
			
			for(GHOST ghost : GHOST.values())
				if(game.getGhostEdibleTime(ghost)>0)
				{
					int distance=game.getShortestPathDistance(current,game.getGhostCurrentNodeIndex(ghost));
					
					if(distance<minDistance)
					{
						minDistance=distance;
						minGhost=ghost;
					}
				}
			
			if(minGhost!=null)	//we found an edible ghost
				return game.getNextMoveTowardsTarget(current,game.getGhostCurrentNodeIndex(minGhost),DM.PATH);
			return MOVE.NEUTRAL;
		}
		default:
		{ // go after the pills and power pills
			int[] pills=game.getPillIndices();
			int[] powerPills=game.getPowerPillIndices();		
			
			ArrayList<Integer> targets=new ArrayList<Integer>();
			
			for(int i=0;i<pills.length;i++)					//check which pills are available			
				if(game.isPillStillAvailable(i))
					targets.add(pills[i]);
			
			for(int i=0;i<powerPills.length;i++)			//check which power pills are available
				if(game.isPowerPillStillAvailable(i))
					targets.add(powerPills[i]);				
			
			int[] targetsArray=new int[targets.size()];		//convert from ArrayList to array
			
			for(int i=0;i<targetsArray.length;i++)
				targetsArray[i]=targets.get(i);
			
			//return the next direction once the closest target has been identified
			return game.getNextMoveTowardsTarget(current,game.getClosestNodeIndexFromNodeIndex(current,targetsArray,DM.PATH),DM.PATH);
		}
		}
	}
	
}