package pacman.entries.pacman;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ArrayBlockingQueue;

import com.sun.org.apache.bcel.internal.generic.NEW;

import pacman.controllers.Controller;
import pacman.game.Constants.DM;
import pacman.game.Constants.GHOST;
import pacman.game.Constants.MOVE;
import pacman.game.Game;
import pacman.entries.pacman.ml.*;
import pacman.entries.pacman.ml.behaviors.*;
import pacman.entries.pacman.ml.features.*;

/*
 * This is the class you need to modify for your entry. In particular, you need to
 * fill in the getMove() method. Any additional classes you write should either
 * be placed in this package or sub-packages (e.g., game.entries.pacman.mypackage).
 */
public class LearningPacMan extends Controller<MOVE> implements Serializable
{
	private static final long serialVersionUID = 1L;

	/**
	 * Creates a controller with the default feature/behavior configuration
	 * and every feature-to-behavior weight initialized to 1.0.
	 */
	public LearningPacMan() {
		super();
		InitTestWeights();
	}
	
	// Last move decided by ChooseMove(); returned from getMove().
	private MOVE m_Move=MOVE.NEUTRAL;
	
	// The N feature extractors and the M candidate behaviors, index-aligned
	// with the rows/columns of m_Weights.
	private List<FeatureExtractor> features;
	private List<Behavior> behaviors;
	// Fallback behavior used when no weighted behavior scores above zero.
	private Behavior defaultBehavior;
	
	/**
	 * Controller entry point: evaluates the features, picks the
	 * highest-scoring behavior, and returns the move it decides on.
	 */
	public MOVE getMove(Game game, long timeDue) 
	{
		ChooseMove(game);
		
		return m_Move;
	}

	// There are N features and M behaviors.
	// NOTE: static, but (re)assigned by every constructor via InitTestWeights();
	// all instances are assumed to share the same feature/behavior layout.
	
	static int N;
	static int M;
	
	// ______________________________________________ GENETIC LEARNING

	public int GetNumFeatures() { return N; }
	public int GetNumBehaviors() { return M; }
	
	private static Random random = new Random();
	
	/**
	 * Breeds a new generation of controllers. Each weight of each child is
	 * copied from a uniformly chosen parent (crossover at weight granularity)
	 * and perturbed by a uniform random value in [-randomness/2, +randomness/2].
	 *
	 * @param parents    non-empty pool of parent controllers
	 * @param count      number of children to produce
	 * @param randomness width of the uniform mutation interval
	 * @return the freshly bred generation
	 */
	public static ArrayList<LearningPacMan> Generation(ArrayList<LearningPacMan> parents, 
			int count, float randomness) {
		
		ArrayList<LearningPacMan> generation = new ArrayList<LearningPacMan>(count);
		
		float halfRandomness = randomness/2.0f;
		
		for (int c = 0; c < count; c++) {

			LearningPacMan nextPacMan = new LearningPacMan();
			
			for (int i = 0; i < N; i++)
				for (int j = 0; j < M; j++) {
					// Pick a random parent per weight; with a single parent, always use it.
					int p = (parents.size() > 1) ? random.nextInt(parents.size()) : 0;
					nextPacMan.m_Weights[i][j] = parents.get(p).m_Weights[i][j] + random.nextFloat() * randomness - halfRandomness; 
				}
			
			generation.add(nextPacMan);
		}
		
		return generation;
	}
	
	/** Returns a defensive N x M copy of the current weight matrix. */
	public float[][] GetWeights() {
		float[][] array = new float[N][M];

		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++) // FIX: was "j < N", overran the M columns when N != M
				array[i][j] = m_Weights[i][j]; 
				
		return array;
	}
	
	/** Copies an N x M weight matrix into this controller. */
	public void SetWeights(float[][] array) {

		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++) // FIX: was "j < N", overran the M columns when N != M
				m_Weights[i][j] = array[i][j]; 
	}
	
	// ______________________________________________ REINFORCEMENT LEARNING
	
	final boolean REINFORCEMENT_LEARNING = false;
	
	final float COUNT_SMOOTHING = 0.001f;
	final float LEARNING_RATE = 0.1f;
	
	// Per-game counters: total moves made, and how often each (feature,
	// behavior) pair drove the chosen move.
	long m_NumMoves = 0;
	// FIX: allocated in InitTestWeights() rather than here — a field
	// initializer runs before the constructor body sets N and M, which left
	// the first instance with a useless 0x0 array.
	long[][] m_MoveCounts;
	
	public void ResetMoveCounts() {
		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++)
				m_MoveCounts[i][j] = 0;
		m_NumMoves = 0;
	}
	
	// Rolling window of recent end-of-game scores, used as a baseline.
	final int SCORE_MEMORY = 50;
	ArrayBlockingQueue<Integer> m_ScoreQueue = new ArrayBlockingQueue<Integer>(SCORE_MEMORY);

	/** Mean of the scores currently in the window; 0 when the window is empty. */
	private float AverageScore() {
		
		if (m_ScoreQueue.isEmpty())
			return 0.0f; // FIX: avoid 0/0 -> NaN before any score is recorded
		
		float averageScore = 0.0f;
		
		for (Integer score : m_ScoreQueue)
			averageScore += score;
		
		return averageScore / m_ScoreQueue.size();
	}
	
	int m_CurrentScore = 1;
	
	// NOTE Makes the assumption that this will only be called at the end of a game
	public void UpdateWeights() {
		
		// Save the score of the game that just ended, evicting the oldest
		// entry once the window is full.
		if (m_ScoreQueue.size() == SCORE_MEMORY)
			m_ScoreQueue.poll();
		m_ScoreQueue.add(Integer.valueOf(m_CurrentScore)); // valueOf: new Integer(...) is deprecated
		
		if (m_NumMoves != 0) {
			// Hoisted out of the loop: the window doesn't change during the update.
			float averageScore = AverageScore();
			
			for (int i = 0; i < N; i++)
				for (int j = 0; j < M; j++) {
					// Fraction of this game's moves attributable to pair (i, j).
					float countFactor = ((float)m_MoveCounts[i][j]) / m_NumMoves;
					
					if (m_CurrentScore > averageScore) { // did well: reinforce
						m_Weights[i][j] += (countFactor + COUNT_SMOOTHING) * LEARNING_RATE;
						m_timesPositive++;
					}
					else if (m_CurrentScore < averageScore) { // did poorly: penalize
						m_Weights[i][j] -= (countFactor + COUNT_SMOOTHING) * LEARNING_RATE;
						m_timesNegative++;
					}
				}
		}
		
		ResetMoveCounts();
	}
	
	// Diagnostic tallies of positive/negative weight nudges; reset by PrintWeights().
	int m_timesPositive = 0;
	int m_timesNegative = 0;
	
	/** Dumps the weight matrix and resets the positive/negative update tallies. */
	public void PrintWeights() {
		System.out.println("Printing Weights");
		for (int i = 0; i < N; i++) {
			for (int j = 0; j < M; j++)
				System.out.print("\t"+m_Weights[i][j]);
			System.out.println();
		}
		System.out.println("Times positive: " + m_timesPositive);
		m_timesPositive = 0;
		System.out.println("Times negative: " + m_timesNegative);
		m_timesNegative = 0;
	}
	
	// ______________________________________________ DECISION MAKING
	
	// N x M matrix of weights from features (rows) to behaviors (columns).
	float[][] m_Weights;
	
	/**
	 * Builds the default feature/behavior configuration, sets N and M, and
	 * allocates the weight, feature-value, and move-count arrays. All
	 * weights start at 1.0.
	 */
	private void InitTestWeights() {
		
		features = new ArrayList<FeatureExtractor>();
		features.add(FeatureExtractor.getInstance(NonEdibleGhostDistanceThresholdFeature.class, 20));
		features.add(FeatureExtractor.getInstance(EdibleGhostsFeature.class, 0));
		features.add(FeatureExtractor.getInstance(DistanceToGhostFeature.class, 0.0f));
		features.add(FeatureExtractor.getInstance(DistanceToGhostFeature.class, 1.0f));
		features.add(FeatureExtractor.getInstance(DistanceToGhostFeature.class, 2.0f));
		features.add(FeatureExtractor.getInstance(DistanceToGhostFeature.class, 3.0f));
		features.add(FeatureExtractor.getInstance(DistanceToClosestGhostFeature.class, 0.0f));
		
		behaviors = new ArrayList<Behavior>();
		behaviors.add(new RunAwayFromGhostsBehavior());
		behaviors.add(new EatClosestEdibleGhostBehavior());
		behaviors.add(new RunAwayFromSingleGhostBehavior(GHOST.INKY));
		behaviors.add(new RunAwayFromSingleGhostBehavior(GHOST.BLINKY));
		behaviors.add(new RunAwayFromSingleGhostBehavior(GHOST.PINKY));
		behaviors.add(new RunAwayFromSingleGhostBehavior(GHOST.SUE));
		defaultBehavior = new EatClosestPillBehavior();
		
		N = features.size();
		M = behaviors.size();
		m_Weights = new float[N][M];
		m_FeatureValues = new float[N];
		// FIX: must be sized here, after N and M are known (see field comment).
		m_MoveCounts = new long[N][M];
		
		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++)
				m_Weights[i][j] = 1.0f;
	}
	
	/**
	 * Evaluates all features, scores each behavior as its feature-weighted
	 * sum, and runs the best-scoring behavior — or the default behavior if
	 * no behavior scores above zero — to set m_Move.
	 */
	@SuppressWarnings("unused")
	private void ChooseMove(Game game) {
		
		// Run the feature checks
		RunFeatures(game);
		
		// Multiply features by weights and choose the maximum
		int bestBehaviorIndex = 0;
		float bestWeight = 0.0f;
		
		for (int j = 0; j < M; j++) { // for each behavior
			float weight = 0.0f;
			for (int i = 0; i < N; i++) // tally the feature-weight sums for that behavior
				weight += m_Weights[i][j] * m_FeatureValues[i]; 
			
			if (weight > bestWeight) { // strictly better sum wins; ties keep the earlier behavior
				bestWeight = weight;
				bestBehaviorIndex = j;
			}
		}
		
		// Run the winning behavior; -1 deliberately selects the default behavior.
		if (bestWeight > 0.0f)
			m_Move = RunBehavior(bestBehaviorIndex, game);
		else
			m_Move = RunBehavior(-1, game);
		
		// When reinforcement learning is enabled, record which active features
		// contributed to the chosen behavior for the end-of-game update.
		if (REINFORCEMENT_LEARNING && bestWeight > 0.0f) {
			m_CurrentScore = game.getScore();
			for (int i = 0; i < N; i++)
				if (m_FeatureValues[i] > 0.0f)
					m_MoveCounts[i][bestBehaviorIndex]++;
			m_NumMoves++;
		}
	}
	
	// ______________________________________________ FEATURES
	
	// Most recent value of each feature extractor, index-aligned with 'features'.
	float[] m_FeatureValues;
	
	/** Re-evaluates every feature extractor against the current game state. */
	private void RunFeatures(Game game) {
		int i = 0;
		for(FeatureExtractor feature : features) {
			m_FeatureValues[i++] = feature.GetValue(game);
		}
	}
	
	// ______________________________________________ BEHAVIORS
	
	/**
	 * Runs the behavior at the given index; any out-of-range index
	 * (e.g. -1) falls back to the default behavior.
	 */
	private MOVE RunBehavior(int behaviorIndex, Game game) {
		
		// FIX: explicit bounds check instead of catching IndexOutOfBoundsException
		// (exceptions should not be used for control flow).
		Behavior behavior = (behaviorIndex >= 0 && behaviorIndex < behaviors.size())
				? behaviors.get(behaviorIndex)
				: defaultBehavior;
		return behavior.decide(game);
	}
	
}