using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;
using System.Text;

using Pacman.Simulator;
using NeuralNetwork;

namespace Pacman.Implementations
{
	/// <summary>
	/// Q-learning based Pacman agent. Attempts to restore a previously trained
	/// <see cref="QLearning"/> instance from "NeuralPac.bin"; if that fails it
	/// starts a fresh network. Movement currently follows the shortest path to
	/// the nearest pill (the neural inputs/outputs are stubbed out below).
	/// </summary>
	public class NeuralPac : BasePacman
	{
		private QLearning qLearning;
		// When true, try to restore trained state from disk; flipped to false
		// if deserialization fails so a fresh network is created instead.
		private bool loadFromStream = true;

		// inputs: angle-ghost * 4, distance-ghost * 4, X, Y,
		// outputs: up, down, left, right
		// (Only 1 slot allocated while the neural path below is commented out.)
		double[] inputs = new double[1];

		public NeuralPac() : base("NeuralPac","1") {
			if( loadFromStream ) {
				try {
					// SECURITY: BinaryFormatter is insecure and deprecated
					// (removed in .NET 9). Kept here because the on-disk
					// format depends on it; do not load untrusted files.
					BinaryFormatter bf = new BinaryFormatter();
					// using ensures the FileStream is closed even on failure
					// (the original leaked the handle returned by OpenRead).
					using( FileStream fs = File.OpenRead("NeuralPac.bin") ) {
						qLearning = (QLearning)bf.Deserialize(fs);
					}
				} catch {
					// Deliberate best-effort load: any failure (missing file,
					// corrupt data, version mismatch) falls back to a fresh net.
					loadFromStream = false;
				}
			}
			if( !loadFromStream ) {
				Network network = new Network(1, 4, 8);
				qLearning = new QLearning(network);
			}
			qLearning.BestActionProb = 1.0;
			qLearning.DiscountFactor = 0.75;
		}

		/// <summary>
		/// Chooses a direction for the current game state: walks all non-wall
		/// map nodes and heads toward the nearest pill or power pill, with a
		/// tunnel guard that avoids reversing direction near the map edges.
		/// </summary>
		public override Direction Think( GameState gs ) {
			// read game state
			/*int index = 0;
			for( int i = 0; i < 4; i++ ){
				inputs[index] = Network.Map(gs.Pacman.Direction - gs.Ghosts[i].Direction,0,3);
				index++;
			}
			for( int i = 0; i < 4; i++ ) {
				Node.PathInfo pathInfo = gs.Pacman.Node.ShortestPath[gs.Ghosts[i].Node.X, gs.Ghosts[i].Node.Y];
				double distance = 1.0;
				if( pathInfo != null ){
					distance = Network.Map(distance, 0, 50);
				}
				inputs[index] = distance;
				index++;
			}
			inputs[index++] = gs.Pacman.Xf / gs.Map.PixelWidth;
			inputs[index++] = gs.Pacman.Yf / gs.Map.PixelHeight;*/

			// if this is to be used, probably make it a (faster) method on Map
			Node.PathInfo bestPath = null;
			foreach( Node node in gs.Map.Nodes ) {
				if( node.Type != Node.NodeType.Wall ) {
					// Seed with the first reachable non-wall node so the pill
					// comparison below always has a baseline distance.
					if( bestPath == null ) {
						bestPath = gs.Pacman.Node.ShortestPath[node.X, node.Y];
						continue;
					}
					if( node.Type == Node.NodeType.Pill || node.Type == Node.NodeType.PowerPill ){
						Node.PathInfo curPath = gs.Pacman.Node.ShortestPath[node.X, node.Y];
						if( curPath != null && curPath.Distance < bestPath.Distance ) {
							bestPath = curPath;
						}
					}
				}
			}

			//inputs[0] = Network.Map((double)bestPath.Direction, 0, 3);

			// return direction
			//Direction dir = (Direction)qLearning.GetAction(inputs);
			//Console.WriteLine(bestPath.Direction + " " + inputs[0]);
			// NOTE(review): bestPath can still be null here if the first
			// non-wall node's ShortestPath entry is null — would NRE; confirm
			// the map always yields a reachable non-wall node before Pacman moves.
			Direction dir = bestPath.Direction;
			// Near the tunnel rows at the horizontal map edges, never reverse:
			// keep the current heading so Pacman commits to the tunnel.
			if( ( gs.Pacman.Node.X < 4 || gs.Pacman.Node.X > Map.Width - 5 ) && gs.Map.Tunnels[gs.Pacman.Node.Y] && gs.Pacman.InverseDirection(gs.Pacman.Direction) == dir ) {
				dir = gs.Pacman.Direction;
			}
			return dir;
		}

		/// <summary>Small positive reward for eating a regular pill.</summary>
		public override void EatPill() {
			qLearning.GiveReward(0.1);
		}

		/// <summary>Same reward as a regular pill (power pills not weighted higher).</summary>
		public override void EatPowerPill() {
			qLearning.GiveReward(0.1);
		}

		// Ghost-eating reward currently disabled.
		public override void EatGhost() {
			//qLearning.GiveReward(0.5);
		}

		/// <summary>On death: no penalty (disabled), but reset the action history.</summary>
		public override void EatenByGhost() {
			//qLearning.GiveReward(-1.0);
			qLearning.ClearHistory();
		}

		/// <summary>Full reward for clearing a level, then reset history.</summary>
		public override void LevelCleared() {
			qLearning.GiveReward(1.0);
			qLearning.ClearHistory();
		}

		/// <summary>
		/// Persists the learner to "NeuralPac.bin" (BinaryFormatter — see the
		/// security note in the constructor) and dumps the network as XML.
		/// </summary>
		public override void SimulationFinished() {
			BinaryFormatter bf = new BinaryFormatter();
			// using guarantees the stream is flushed and the handle released
			// (the original never closed the FileStream from File.Open).
			using( FileStream fs = File.Open("NeuralPac.bin", FileMode.Create) ) {
				bf.Serialize(fs, qLearning);
			}
			qLearning.Network.SaveXml("NeuralPac.xml");
		}
	}
}
