﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Xna.Framework;
using Tron.DataStructures;

namespace Tron.Players.Search
{
    /**
     * <summary>Minimax search player for two-player Tron. NOTE(review):
     * despite the class name, the current implementation is a plain,
     * depth-unbounded minimax — there is no alpha-beta pruning and no
     * iterative deepening yet. The per-move time budget is stored so a
     * future time-limited implementation can honour it.</summary>
     */
    class IDSMiniMaxSearchPlayer : Player
    {
        /** Board cell value marking an unoccupied block. */
        public static readonly int EMPTY_BLOCK = -2;

        /** Default time budget per move, in milliseconds. */
        public static readonly int DEFAULT_TURN_TIME = 200;

        // The four axis-aligned unit moves (right, left, down, up).
        private static readonly List<Vector2> possibleMoves = new List<Vector2> { Vector2.UnitX, -Vector2.UnitX, Vector2.UnitY, -Vector2.UnitY };

        // Time in milliseconds the player is given to move.
        // FIX: the original constructor accepted but silently discarded this value.
        private readonly int moveTime;

        // Index of the (single) opponent; recomputed on every Update call.
        private int opponentIndex;

        // Best move found by the most recent search (set at depth 0 in maxValue).
        private Vector2 bestMove;

        // FIX: removed unused fields `alpha`, `beta` and `bestValue` — they were
        // never read (alpha/beta pruning is not implemented; bestValue was only
        // referenced from commented-out code).

        public IDSMiniMaxSearchPlayer(int playerIndex)
            : this(playerIndex, DEFAULT_TURN_TIME)
        {
        }

        /**
         * <summary>Constructs a minimax search player with the given player
         * index and move-time budget.</summary>
         * <param name="playerIndex">Index of this player in the game's player list.</param>
         * <param name="moveTime">The time in milliseconds that the player is given to
         * move. Currently recorded but not yet enforced by the search.</param> */
        public IDSMiniMaxSearchPlayer(int playerIndex, int moveTime)
            : base(playerIndex)
        {
            // FIX: previously the moveTime parameter was ignored entirely.
            this.moveTime = moveTime;
        }

        /**
         * <summary>Copies the current board into a scratch array, runs a full
         * minimax search and writes the chosen direction into
         * <paramref name="actions"/>.</summary>
         */
        public override void Update(Percepts percepts, Actions actions)
        {
            // NOTE(review): the search below models exactly ONE opponent, so a
            // "!= 2" guard looks like the intended check. The original comment
            // said the value was "changed for a friend" to 3 — confirm which
            // player count this build is actually meant to run with before
            // changing it back.
            if (percepts.PlayerStates.Count != 3)
            {
                throw new NotImplementedException();
            }

            // Assumes a two-player match: the opponent is whichever of {0, 1}
            // this player is not.
            opponentIndex = (PlayerIndex == 1) ? 0 : 1;

            Vector2 agentLoc = percepts.PlayerStates[PlayerIndex].Position;
            Vector2 opponentLoc = percepts.PlayerStates[opponentIndex].Position;

            // Scratch copy of the board that the search mutates and restores.
            // FIX: replaced the manual element-by-element copy with Array.Clone.
            int[,] board = (int[,])percepts.Board.BlockArray.Clone();

            Console.WriteLine("Update");

            // set the action
            actions.MoveDirection = minimaxSearch(board, agentLoc, opponentLoc, percepts, actions);
        }

        /**
         * <summary>Runs the minimax search from the given state and returns
         * the best move found for this player.</summary>
         * <exception cref="ArgumentNullException">If <paramref name="board"/> is null.</exception>
         */
        private Vector2 minimaxSearch(int[,] board, Vector2 agentLoc, Vector2 opponentLoc,
            Percepts percepts, Actions actions)
        {
            // FIX: throw ArgumentNullException (with the correct parameter name)
            // instead of manually throwing NullReferenceException; the original
            // message also wrongly said "percepts is null". The old Vector2
            // null-checks were dead code — Vector2 is a struct, so the lifted
            // "== null" comparison was always false.
            if (board == null) throw new ArgumentNullException(nameof(board));

            // The opponent moves first in this formulation: minValue expands the
            // opponent's replies, then maxValue picks this player's move
            // (recording bestMove at depth 0).
            minValue(board, 0, agentLoc, opponentLoc, percepts, actions);

            Console.WriteLine("AlphaBetaSearch:");
            Console.WriteLine("  agent location:    {0}", agentLoc.ToString());
            Console.WriteLine("  opponent location: {0}", opponentLoc.ToString());
            Console.WriteLine("  SELECTED MOVE: {0}\n\n", bestMove);
            return bestMove;
        }

        /**
         * <summary>Agent node: explores this player's successor moves and
         * maximizes the utility value. At depth 0 it also records the best
         * move in <see cref="bestMove"/>. If the agent has no valid move the
         * state is terminal and is evaluated directly.</summary>
         */
        private double maxValue(int[,] board, int depth, Vector2 agentLoc, Vector2 opponentLoc,
            Percepts percepts, Actions actions)
        {
            double best = Double.NegativeInfinity;
            bool isTerminal = true;
            foreach (Vector2 a in possibleMoves)
            {
                Vector2 newLocation = new Vector2(agentLoc.X + a.X, agentLoc.Y + a.Y);
                if (isValid(board, newLocation))
                {
                    isTerminal = false;
                    // Tentatively claim the block, recurse, then restore it.
                    board[(int)newLocation.X, (int)newLocation.Y] = PlayerIndex;
                    double v = minValue(board, depth + 1, newLocation, opponentLoc,
                        percepts, actions);
                    if (v > best)
                    {
                        best = v;
                        if (depth == 0)
                            bestMove = a;
                    }
                    board[(int)newLocation.X, (int)newLocation.Y] = EMPTY_BLOCK;
                }
            }

            if (isTerminal)
            {
                best = utility2(board, agentLoc, opponentLoc, depth + 1, percepts);
            }

            return best;
        }

        /**
         * <summary>Opponent node: explores the opponent's successor moves and
         * minimizes the utility value. If the opponent has no valid move the
         * state is terminal and is evaluated directly.</summary>
         */
        private double minValue(int[,] board, int depth, Vector2 agentLoc, Vector2 opponentLoc,
            Percepts percepts, Actions actions)
        {
            double best = Double.PositiveInfinity;
            bool isTerminal = true;
            foreach (Vector2 a in possibleMoves)
            {
                Vector2 newLocation = new Vector2(opponentLoc.X + a.X, opponentLoc.Y + a.Y);
                if (isValid(board, newLocation))
                {
                    isTerminal = false;
                    // Tentatively claim the block, recurse, then restore it.
                    board[(int)newLocation.X, (int)newLocation.Y] = opponentIndex;
                    double v = maxValue(board, depth, agentLoc, newLocation, percepts, actions);
                    if (v < best)
                    {
                        best = v;
                    }
                    board[(int)newLocation.X, (int)newLocation.Y] = EMPTY_BLOCK;
                }
            }

            if (isTerminal)
            {
                best = utility2(board, agentLoc, opponentLoc, depth, percepts);
            }
            return best;
        }

        /**
         * <summary>Depth-discounted utility from this player's perspective:
         * +1/2^depth if the agent has the advantage, -1/2^depth if the
         * opponent does, 0 for a draw. Discounting prefers winning sooner
         * and losing later.</summary>
         */
        private double utility2(int[,] board, Vector2 agentLoc, Vector2 opponentLoc, int depth,
            Percepts percepts)
        {
            int advantage = getAdvantage(board, agentLoc, opponentLoc, depth, percepts);
            double utilityValue = 1 / Math.Pow(2, depth);
            if (advantage == PlayerIndex)
            {
                return utilityValue;
            }
            else if (advantage == opponentIndex)
            {
                return (-1) * utilityValue;
            }
            else return 0;
        }

        /**
         * <summary>Determines the advantageous player: this player's index if
         * the opponent is trapped, the opponent's index if this player is
         * trapped, or -1 if neither (draw / undecided).</summary>
         */
        private int getAdvantage(int[,] board, Vector2 agentLoc, Vector2 opponentLoc, int depth,
            Percepts percepts)
        {
            int advantage = -1;
            if (isTerminal(board, opponentLoc))
            {
                advantage = PlayerIndex;
            }
            else if (isTerminal(board, agentLoc))
            {
                advantage = opponentIndex;
            }
            return advantage;
        }

        /**
         * <summary>Checks whether the given location is terminal, i.e. has no
         * empty neighbouring block to move to.</summary>
         * */
        private bool isTerminal(int[,] board, Vector2 location)
        {
            // FIX: the original logic was inverted — it returned TRUE as soon
            // as a valid move EXISTED, i.e. exactly when the location was NOT
            // terminal. That flipped the advantage computed by getAdvantage
            // and therefore the sign of every terminal utility.
            foreach (Vector2 a in possibleMoves)
            {
                Vector2 newLocation = new Vector2(location.X + a.X, location.Y + a.Y);
                if (isValid(board, newLocation))
                {
                    return false;
                }
            }
            return true;
        }

        /**
         * <summary>True if the location lies inside the board and the block
         * there is empty.</summary>
         */
        private bool isValid(int[,] board, Vector2 location)
        {
            int x = (int)location.X;
            int y = (int)location.Y;
            // FIX: bounds check added — the original indexed the array
            // unconditionally and threw IndexOutOfRangeException for
            // off-board coordinates instead of treating them as invalid.
            if (x < 0 || y < 0 || x >= board.GetLength(0) || y >= board.GetLength(1))
            {
                return false;
            }
            return board[x, y] == EMPTY_BLOCK;
        }
    }
}
