package pacman.entries.pacman;

import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;

import pacman.game.Constants.MOVE;
import pacman.game.Game;
import pacman.entries.pacman.ml.*;
import pacman.entries.pacman.ml.behaviors.*;
import pacman.entries.pacman.ml.features.*;

/*
 * This is the class you need to modify for your entry. In particular, you need to
 * fill in the getAction() method. Any additional classes you write should either
 * be placed in this package or sub-packages (e.g., game.entries.pacman.mypackage).
 */
public class PerceptronTrainablePacMan extends TrainablePacMan
{
	/*
	 * Perceptron-style controller. Each tick the feature extractors are
	 * evaluated, an N x M weight matrix maps feature activations to behavior
	 * scores, and the highest strictly-positive scorer (or a default behavior
	 * otherwise) decides the move. After each game, TrainingStep() nudges the
	 * weights up or down depending on whether the final score beat the recent
	 * average score.
	 */

	public PerceptronTrainablePacMan(String filename, boolean training) {
		super(filename, training);
	}

	private MOVE m_Move = MOVE.NEUTRAL;

	// There are N features and M behaviors; both are set in InitVariables().
	int N;
	int M;

	private List<FeatureExtractor> features;
	private List<Behavior> behaviors;
	private Behavior defaultBehavior; // used when no behavior scores above zero

	// ______________________________________________ REINFORCEMENT LEARNING

	final float COUNT_SMOOTHING = 0.001f; // keeps rarely-used weights moving slightly
	final float LEARNING_RATE = 0.1f;

	long m_NumMoves = 0;
	// m_MoveCounts[i][j] = how often feature i was active when behavior j was
	// chosen during the current game. Allocated in InitVariables(): allocating
	// at declaration time read N and M before they were set (both 0), yielding
	// a 0x0 array and an ArrayIndexOutOfBoundsException on the first recorded
	// move.
	long[][] m_MoveCounts;

	// Zero the per-game move statistics.
	private void ResetMoveCounts() {
		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++)
				m_MoveCounts[i][j] = 0;
		m_NumMoves = 0;
	}

	final int SCORE_MEMORY = 50; // number of recent game scores kept for the running average
	ArrayBlockingQueue<Integer> m_ScoreQueue = new ArrayBlockingQueue<Integer>(SCORE_MEMORY);

	// Mean of the scores currently in m_ScoreQueue. Returns 0 for an empty
	// queue (defensive: avoids a 0/0 NaN; TrainingStep always enqueues a
	// score before calling this, so the guard is unreachable on that path).
	private float AverageScore() {
		if (m_ScoreQueue.isEmpty())
			return 0.0f;

		float total = 0.0f;
		for (Integer score : m_ScoreQueue)
			total += score;
		return total / m_ScoreQueue.size();
	}

	int m_CurrentScore = 1;

	// NOTE Makes the assumption that this will only be called at the end of a game
	protected void TrainingStep() {

		// Remember the final score, evicting the oldest once the window is full.
		if (m_ScoreQueue.size() == SCORE_MEMORY)
			m_ScoreQueue.poll();
		m_ScoreQueue.add(m_CurrentScore); // autoboxing; new Integer(...) is deprecated

		if (m_NumMoves != 0) {
			// Loop-invariant: compute the average once, not N*M times.
			float averageScore = AverageScore();

			for (int i = 0; i < N; i++)
				for (int j = 0; j < M; j++) {
					// Fraction of this game's moves in which feature i backed behavior j.
					float countFactor = ((float) m_MoveCounts[i][j]) / m_NumMoves;

					if (m_CurrentScore > averageScore) { // did well: reinforce pairs used
						m_Weights[i][j] += (countFactor + COUNT_SMOOTHING) * LEARNING_RATE;
						m_timesPositive++;
					}
					else if (m_CurrentScore < averageScore) { // did poorly: penalize them
						m_Weights[i][j] -= (countFactor + COUNT_SMOOTHING) * LEARNING_RATE;
						m_timesNegative++;
					}
				}
		}

		ResetMoveCounts();
	}

	int m_timesPositive = 0;
	int m_timesNegative = 0;

	// Dump the weight matrix and the positive/negative update tallies;
	// the tallies are reset after printing.
	public void PrintParameters() {
		System.out.println("Printing Weights");
		for (int i = 0; i < N; i++) {
			for (int j = 0; j < M; j++)
				System.out.print("\t" + m_Weights[i][j]);
			System.out.println();
		}
		System.out.println("Times positive: " + m_timesPositive);
		m_timesPositive = 0;
		System.out.println("Times negative: " + m_timesNegative);
		m_timesNegative = 0;
	}

	// ______________________________________________ DECISION MAKING

	float[][] m_Weights; // N x M array of weights from features to behaviors

	// Build the feature/behavior lists and size every N x M structure.
	private void InitVariables() {
		features = new ArrayList<FeatureExtractor>(3);
		features.add(FeatureExtractor.getInstance(NonEdibleGhostDistanceThresholdFeature.class, 20));
		features.add(FeatureExtractor.getInstance(EdibleGhostsFeature.class, 0));
		features.add(FeatureExtractor.getInstance(GhostDirectionFeature.class, 0));

		behaviors = new ArrayList<Behavior>();
		behaviors.add(new RunAwayFromGhostsBehavior());
		behaviors.add(new EatClosestEdibleGhostBehavior());
		defaultBehavior = new EatClosestPillBehavior();

		N = features.size();
		M = behaviors.size();

		m_Weights = new float[N][M];
		m_FeatureValues = new float[N];
		m_MoveCounts = new long[N][M]; // N and M are now known
	}

	@Override
	protected void InitTraining() {

		InitVariables();

		// Start every weight at 1.0 so all behaviors are initially viable.
		// (Previously a separate zero-filled array was assigned over m_Weights
		// AFTER the 1.0 initialization, silently leaving every weight at 0 so
		// only the default behavior could ever be selected.)
		for (int i = 0; i < N; i++)
			for (int j = 0; j < M; j++)
				m_Weights[i][j] = 1.0f;
	}

	@Override
	protected MOVE ChooseMove(Game game) {

		// Evaluate every feature for the current game state.
		RunFeatures(game);

		// Score each behavior as the weighted sum of feature activations
		// and keep the best strictly-positive one.
		int bestBehaviorIndex = 0;
		float bestWeight = 0.0f;

		for (int j = 0; j < M; j++) { // for each behavior
			float weight = 0.0f;
			for (int i = 0; i < N; i++) // tally the feature-weight sums for that behavior
				weight += m_Weights[i][j] * m_FeatureValues[i];

			if (weight > bestWeight) { // if it has a better sum, select it
				bestWeight = weight;
				bestBehaviorIndex = j;
			}
		}

		// No behavior scored above zero -> fall back to the default behavior.
		if (bestWeight > 0.0f)
			m_Move = RunBehavior(bestBehaviorIndex, game);
		else
			m_Move = RunBehavior(-1, game);

		// When training, record which active features voted for the chosen behavior.
		if (isTraining() && bestWeight > 0.0f) {
			m_CurrentScore = game.getScore();
			for (int i = 0; i < N; i++)
				if (m_FeatureValues[i] > 0.0f)
					m_MoveCounts[i][bestBehaviorIndex]++;
			m_NumMoves++;
		}

		return m_Move;
	}

	// ______________________________________________ FEATURES

	float[] m_FeatureValues;

	// Fill m_FeatureValues with the current value of each feature extractor.
	private void RunFeatures(Game game) {
		int i = 0;
		for (FeatureExtractor feature : features)
			m_FeatureValues[i++] = feature.GetValue(game);
	}

	// ______________________________________________ BEHAVIORS

	// Run the behavior at behaviorIndex; any out-of-range index (ChooseMove
	// passes -1 on purpose) selects the default behavior. A plain bounds check
	// replaces the old catch of IndexOutOfBoundsException — exceptions are not
	// control flow.
	private MOVE RunBehavior(int behaviorIndex, Game game) {
		Behavior behavior = (behaviorIndex >= 0 && behaviorIndex < behaviors.size())
				? behaviors.get(behaviorIndex)
				: defaultBehavior;
		return behavior.decide(game);
	}

	@Override
	protected void InitFromFile(ObjectInput input) {
		// TODO weights are not yet persisted; fall back to training defaults.
		InitTraining();
	}

	@Override
	protected void CompleteTraining(ObjectOutput output) {
		// TODO weights are not yet written to the output stream.
	}

	@Override
	protected void Complete() {
		ResetMoveCounts();
	}

	@Override
	protected void InitDefault() {
		InitVariables();

		// Hard-coded weights; the shape must stay N x M
		// (currently 3 features x 2 behaviors).
		m_Weights = new float[][] { { 2, 0 }, { 0, 1 }, { 0, 1 } };
	}

}