﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Xna.Framework;
using Tron.DataStructures;
using Tron.Helpers;
using System.IO;
using System.Collections;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;


namespace Tron.Players.Simple
{
    class ReinforcementLearner : Player
    {
        /// <summary>
        /// Creates a learner controlling the player at <paramref name="playerIndex"/>.
        /// Weights (Theta) start at zero; re-enable LoadLearningState() to resume
        /// from the files written by SaveLearningState().
        /// </summary>
        public ReinforcementLearner(int playerIndex)
            : base(playerIndex) {

            //LoadLearningState();        
            //read from file
        
        }

        /// <summary>
        /// Serializable key pairing a player-state percept with an action index;
        /// used as the key of the StateAction visit-count Hashtable. Equality and
        /// hashing are both derived from the string form (state + action), so two
        /// pairs are equal iff their string renderings match.
        /// </summary>
        [Serializable]
        class StateActionPair
        {
            IPlayerStatePercepts state;
            int action;

            public StateActionPair(IPlayerStatePercepts st, int act)
            {
                state = st;
                action = act;
            }

            /// <summary>Re-targets this pair at a different action (PlayMove reuses
            /// one local probe object to look up counts for each candidate action).</summary>
            public void SetAction(int act)
            {
                action = act;
            }

            // NOTE: the hash depends on the mutable 'action' field — a pair must
            // not be mutated while stored as a Hashtable key. Mutating a local
            // probe object between lookups (as PlayMove does) is fine.
            public override int GetHashCode()
            {
                return ToString().GetHashCode();
            }

            public override string ToString()
            {
                // Canonical form: state rendering followed by the action index.
                return state.ToString() + action.ToString();
            }

            public override bool Equals(object obj)
            {
                // FIX: the original cast ((StateActionPair)obj) threw
                // InvalidCastException / NullReferenceException for foreign or
                // null arguments; Equals must return false instead.
                StateActionPair other = obj as StateActionPair;
                if (other == null)
                    return false;

                return ToString().Equals(other.ToString());
            }
        };

        // Number of features in the linear Q-approximation (see ComputeFeature).
        const int NUMBER_OF_FEATURES = 7;
        // Four possible moves: +X, -X, +Y, -Y.
        const int NUMBER_OF_ACTIONS = 4;
        // Text file holding the Theta weights, one value per line, row-major.
        const string LEARNING_FILE = "learn.txt";
        // Binary-serialized visit-count Hashtable. (The "hastable" typo is part of
        // the on-disk contract; do not rename without migrating existing files.)
        const string HASHTABLE_FILENAME = "hastable.txt";
        // Weight matrix, one column per action. NOTE(review): the literal 4 here
        // (and in the load/save loops) should be NUMBER_OF_ACTIONS.
        double[,] Theta = new double[NUMBER_OF_FEATURES, 4];
        // Visit-count threshold used by ExplorationFunction.
        int MAXN = 100;
        // Learning-rate base (Alpha = alpha^n) and discount factor.
        double alpha = 0.95, gamma = 0.95;
        // The four legal move directions, in a fixed order.
        static readonly List<Vector2> possibleMoves = new List<Vector2> { Vector2.UnitX, -Vector2.UnitX, Vector2.UnitY, -Vector2.UnitY };
        // Board cell value marking an empty (untrailed) cell.
        static readonly int EMPTY_BLOCK = -2;
        // Memory of the last step (state, action, reward), consumed by PlayMove.
        IPlayerStatePercepts PreviousState;
        Actions PreviousAction;
        int PreviousReward;
        // Most recent percepts, cached for the feature helper methods.
        Percepts mypercept;
        // Visit counts keyed by StateActionPair.
        Hashtable StateAction = new Hashtable();
        
        // Action indices 0..3, used as Theta columns by EvaluateQ.
        // (Original note: should be replaced by real values of actions.)
        int a1 = 0, a2 = 1, a3 = 2, a4 = 3;

        /// <summary>
        /// Returns the larger of <paramref name="a"/> and <paramref name="b"/>.
        /// </summary>
        /// <remarks>
        /// FIX: the original allocated a fresh time-seeded Random on every call to
        /// "break ties", but when neither a &gt; b nor b &gt; a holds for doubles
        /// the two values are equal (or a NaN is involved, where the original was
        /// already nondeterministic), so returning <paramref name="a"/> is
        /// observationally equivalent and avoids the per-call allocation —
        /// rapidly re-seeded Randoms also produce correlated sequences.
        /// </remarks>
        double maximum(double a, double b)
        {
            if (a > b)
                return a;
            if (b > a)
                return b;

            // a == b (or a NaN is involved): either operand is acceptable.
            return a;
        }

        /// <summary>
        /// Feature 0/1: the most-negative signed offset between this player and
        /// any opponent along one axis — FeatureID 0 selects X, any other value
        /// selects Y. Reads the cached <c>mypercept</c> set by PlayMove.
        /// </summary>
        /// <remarks>
        /// NOTE(review): minx/miny start at 0 and are only ever lowered, so
        /// opponents lying in the positive direction contribute nothing and the
        /// result is always &lt;= 0 — confirm this is the intended encoding.
        /// </remarks>
        int OpponentDistance(int FeatureID)
        {
            Percepts percepts = mypercept;

            int minx = 0, miny = 0;
            IPlayerStatePercepts myPlayerState = percepts.PlayerStates[PlayerIndex];

            // Collect every player other than ourselves.
            List<IPlayerStatePercepts> otherPlayerState = new List<IPlayerStatePercepts>();
            for (int i = 0; i < percepts.PlayerStates.Count; i++)
            {
                if (i != PlayerIndex)
                {
                    otherPlayerState.Add(percepts.PlayerStates[i]);
                }
            }

            // Track the smallest (most negative) per-axis offset seen so far.
            // (The original buffered these in lists and built an unused
            // relativeMovement list; the buffers were only ever read back once.)
            for (int i = 0; i < otherPlayerState.Count; i++)
            {
                float dx = otherPlayerState[i].Position.X - myPlayerState.Position.X;
                float dy = otherPlayerState[i].Position.Y - myPlayerState.Position.Y;

                if (minx > dx)
                    minx = Convert.ToInt32(dx);
                if (miny > dy)
                    miny = Convert.ToInt32(dy);
            }

            if (FeatureID == 0)
                return minx;

            return miny;
        }
        //int RelativeDirection(Percepts percepts)
        int RelativeDirection()
        {
            int i;
            bool direction = false;
            Percepts percepts = mypercept;

            IPlayerStatePercepts myPlayerState = percepts.PlayerStates[PlayerIndex];

            List<IPlayerStatePercepts> otherPlayerState = new List<IPlayerStatePercepts>();
            List<float> distanceX = new List<float>();
            List<float> distanceY = new List<float>();
            List<Boolean> relativeMovement = new List<Boolean>();

            for (i = 0; i < otherPlayerState.Count; i++)
            {
                if (myPlayerState.Moves.Count > 2)
                {
                    Vector2 temp1 = myPlayerState.Moves[myPlayerState.Moves.Count - 1].Start;
                    Vector2 temp2 = otherPlayerState[i].Moves[otherPlayerState[i].Moves.Count - 1].Start;
                    float distance1 = Vector2.DistanceSquared(temp1, temp2);

                    Vector2 temp3 = myPlayerState.Moves[myPlayerState.Moves.Count - 2].Start;
                    Vector2 temp4 = otherPlayerState[i].Moves[otherPlayerState[i].Moves.Count - 2].Start;
                    float distance2 = Vector2.DistanceSquared(temp3, temp4);

                    if (distance1 < distance2)
                        relativeMovement.Add(false); //Towards
                    else if (distance1 >= distance2)
                        relativeMovement.Add(true);//Away
                    /*
                    Console.WriteLine("distance1 : " + i + " : " + distance1);
                    Console.WriteLine("distance2 : " + i + " : " + distance2);
                    //Console.WriteLine(relativeMovement.Count);
                    Console.WriteLine("relativeMovement[i] : " + i + " : " + relativeMovement[i]);
                     */
                }
            }
            if (direction)
            return 1;

            return 0;
        
        }

        /// <summary>
        /// Features 3-6: distance in free cells from this player's position to
        /// the nearest wall/trail along one axis direction.
        /// FeatureID: 0 = +X, 1 = -X, 2 = +Y, 3 = -Y (same order as possibleMoves).
        /// </summary>
        /// <param name="FeatureID">Index into the fixed direction list.</param>
        /// <returns>Number of empty cells before the first blocked cell.</returns>
        public int DistanceFromWalls(int FeatureID)
        {
            Percepts percepts = mypercept;
            IPlayerStatePercepts myPlayerState = percepts.PlayerStates[PlayerIndex];

            List<Vector2> directionVectors = new List<Vector2>() { Vector2.UnitX, -Vector2.UnitX, Vector2.UnitY, -Vector2.UnitY };

            // Only the requested direction is needed. The original probed all
            // four directions, computed an unused max, and allocated several
            // unused lists; an out-of-range FeatureID still throws the same
            // ArgumentOutOfRangeException via the list indexer.
            return distance_to_nearest_wall(percepts, myPlayerState.Position, directionVectors[FeatureID]);
        }

        /// <summary>
        /// Walks from <paramref name="position"/> in steps of
        /// <paramref name="direction"/>, counting consecutive empty cells; stops
        /// at the first non-empty cell and returns the count.
        /// </summary>
        /// <remarks>Assumes the board is ringed by non-empty border cells,
        /// otherwise the walk would run off the board — TODO confirm.</remarks>
        public int distance_to_nearest_wall(Percepts percepts, Vector2 position, Vector2 direction)
        {
            int distance = 0;
            Vector2 nextPosition = position + direction;

            // FIX(consistency): use the named EMPTY_BLOCK constant instead of the
            // magic literal -2 (identical value).
            while (percepts.Board[nextPosition] == EMPTY_BLOCK)
            {
                distance = distance + 1;
                nextPosition = nextPosition + direction;
            }
            return distance;
        }

        /// <summary>
        /// Restores the learning state: the Theta weight matrix from
        /// LEARNING_FILE (one value per line, row-major, as written by
        /// SaveLearningState) and the visit-count Hashtable from
        /// HASHTABLE_FILENAME.
        /// </summary>
        /// <remarks>
        /// SECURITY NOTE(review): BinaryFormatter deserialization is unsafe if
        /// the file can be tampered with; consider a safer serializer if this
        /// data is ever not fully trusted.
        /// </remarks>
        void LoadLearningState()
        {
            // FIX: 'using' guarantees the streams are closed even if parsing
            // throws — the original leaked both handles on any exception.
            using (FileStream file = new FileStream(LEARNING_FILE, FileMode.Open, FileAccess.Read))
            using (StreamReader sr = new StreamReader(file))
            {
                for (int i = 0; i < NUMBER_OF_FEATURES; i++)
                {
                    for (int j = 0; j < 4; j++)
                    {
                        // Symmetric with SaveLearningState's WriteLine formatting.
                        Theta[i, j] = Convert.ToDouble(sr.ReadLine());
                    }
                }
            }

            using (Stream stream = File.Open(HASHTABLE_FILENAME, FileMode.Open))
            {
                BinaryFormatter bFormatter = new BinaryFormatter();
                StateAction = (Hashtable)bFormatter.Deserialize(stream);
            }
        }

        /// <summary>
        /// Persists the learning state: Theta to LEARNING_FILE (one value per
        /// line, row-major) and the visit-count Hashtable to HASHTABLE_FILENAME.
        /// </summary>
        void SaveLearningState()
        {
            // FIX: the original used FileMode.Open + FileAccess.Write, which
            // (a) throws FileNotFoundException when the file does not exist yet
            // and (b) does not truncate, leaving stale bytes at the tail when the
            // new payload is shorter. FileMode.Create creates-or-truncates.
            // 'using' closes the streams on all paths.
            using (FileStream file = new FileStream(LEARNING_FILE, FileMode.Create, FileAccess.Write))
            using (StreamWriter sw = new StreamWriter(file))
            {
                for (int i = 0; i < NUMBER_OF_FEATURES; i++)
                {
                    for (int j = 0; j < 4; j++)
                    {
                        sw.WriteLine(Theta[i, j]);
                    }
                }
            }

            using (Stream stream = File.Open(HASHTABLE_FILENAME, FileMode.Create))
            {
                BinaryFormatter bFormatter = new BinaryFormatter();
                bFormatter.Serialize(stream, StateAction);
            }
        }

        /// <summary>
        /// Evaluates feature i for the Q-function and returns it normalized:
        /// features 0-1 are opponent X/Y distance, 2 is relative direction,
        /// 3-6 are the four wall distances. Any other index yields Normalize(0).
        /// </summary>
        /// <remarks>NOTE(review): the 'state' parameter is currently unused —
        /// the feature helpers read the cached 'mypercept' field instead.</remarks>
        double ComputeFeature(int i, IPlayerStatePercepts state)
        {
            int raw = 0;

            if (i == 0 || i == 1)
                raw = OpponentDistance(i);
            else if (i == 2)
                raw = RelativeDirection();
            else if (i >= 3 && i <= 6)
                raw = DistanceFromWalls(i - 3);

            return Normalize(raw);
        }

        /// <summary>
        /// Scales an integer feature value into (-1, 1) by repeatedly dividing by
        /// 10 until its magnitude drops below 1 (e.g. 123 -> 0.123, 0 -> 0,
        /// -45 -> -0.45).
        /// </summary>
        /// <remarks>
        /// FIX: the original loop condition was 'next >= 1', so negative inputs
        /// — which OpponentDistance produces, since its result is always &lt;= 0 —
        /// were returned unscaled and could dominate the weighted sum in
        /// EvaluateQ. Non-negative inputs behave exactly as before.
        /// </remarks>
        double Normalize(int big)
        {
            double next = big;
            while (Math.Abs(next) >= 1)
                next /= 10.0;
            return next;
        }

        /// <summary>
        /// Linear Q-value approximation: the dot product of the action's weight
        /// column in Theta with the feature vector of the given state.
        /// </summary>
        double EvaluateQ(IPlayerStatePercepts state, int action)
        {
            double q = 0.0;
            int feature = 0;
            while (feature < NUMBER_OF_FEATURES)
            {
                q += Theta[feature, action] * ComputeFeature(feature, state);
                feature++;
            }
            return q;
        }

        /// <summary>
        /// Exploration-adjusted utility: the raw Q-value while the state/action
        /// pair has been tried fewer than MAXN times, a constant 1 afterwards.
        /// </summary>
        /// <remarks>NOTE(review): the classic AIMA exploration function returns
        /// the optimistic estimate for UNDER-visited pairs — confirm this
        /// orientation is intentional.</remarks>
        double ExplorationFunction(double q, int n)
        {
            return (n < MAXN) ? q : 1;
        }

        /// <summary>
        /// Learning-rate schedule: alpha (0.95) raised to the n-th power, so the
        /// step size decays as the visit count n grows.
        /// </summary>
        double Alpha(double n)
        {
            // Both operands are already doubles; the original wrapped each in a
            // redundant Convert.ToDouble identity conversion.
            return Math.Pow(alpha, n);
        }

        /// <summary>Delegates per-game reset to the base Player; this learner
        /// keeps its weights and visit counts across games.</summary>
        public override void NewGame()
        {
            base.NewGame();
        }

        /// <summary>Persists Theta and the visit-count table to disk, then lets
        /// the base class record its own results.</summary>
        public override void SaveResults()
        {
            //commit to file
            SaveLearningState();

            base.SaveResults();
        }
        /*
        int MaxOveraDash()
        {
            int max = 0;
            int aDash = 0, sDash = 0;

            //we need to repeat this step over a range I guess.
            max = Math.Max(max, EvaluateQ(aDash, sDash));

            return max;

        }
        */
        /// <summary>
        /// max over a' of Q(s, a'): the best Q-value achievable from state s
        /// across the four actions a1..a4.
        /// </summary>
        double EvaluateQMaxOveraDash(IPlayerStatePercepts s)
        {
            // Seed with the first action, then fold the rest in via 'maximum'.
            double best = EvaluateQ(s, a1);
            foreach (int action in new[] { a2, a3, a4 })
            {
                best = maximum(best, EvaluateQ(s, action));
            }
            return best;
        }
       
        /// <summary>
        /// Terminal update: feeds the final score as the reward through one last
        /// PlayMove (the chosen move is discarded — the game is over), then
        /// clears the previous-step memory for the next game.
        /// </summary>
        /// <remarks>
        /// NOTE(review): declared 'public virtual', not 'override' — if the base
        /// Player class also declares GameOver, this member hides rather than
        /// overrides it and may never be called polymorphically; verify against
        /// the base class.
        /// </remarks>
        public virtual void GameOver(Percepts percepts, List<int> playerScores) 
        {
            int reward = playerScores[PlayerIndex];
            // Throwaway Actions object for PlayMove to write its (unused) choice into.
            Actions act = new Actions(Vector2.UnitX);
            
            PlayMove(percepts, act, reward);
            PreviousState = null;
            PreviousReward = 0;
            PreviousAction = null;


        }

        /// <summary>
        /// Maps a move direction to a Theta column index in 0..3:
        /// +X -> 0, +Y -> 1, -X -> 2, anything else (i.e. -Y) -> 3.
        /// </summary>
        /// <remarks>
        /// FIX: the original returned 1..4, which (a) could index column 4 of the
        /// [NUMBER_OF_FEATURES, 4] Theta matrix — an IndexOutOfRangeException for
        /// the -UnitY direction — and (b) was offset by one from the action
        /// constants a1..a4 = 0..3 used by EvaluateQ, so the weight columns being
        /// updated in PlayMove were never the ones being read.
        /// </remarks>
        int Actions2Int(Actions action)
        {
            if (action.MoveDirection == Vector2.UnitX)
                return 0;
            if (action.MoveDirection == Vector2.UnitY)
                return 1;
            if (action.MoveDirection == -Vector2.UnitX)
                return 2;

            return 3;
        }

        /// <summary>
        /// Maps an action index to a concrete move: 1 -> +X, 2 -> +Y, 3 -> -X,
        /// 4 -> -Y. Any other value falls through to the +X default.
        /// </summary>
        /// <remarks>
        /// NOTE(review): PlayMove passes indices from a1..a4 = 0..3, so index 0
        /// silently becomes +X via the default and index 4 (-UnitY) is
        /// unreachable — this 1-based encoding disagrees with the 0-based action
        /// constants used everywhere else.
        /// </remarks>
        Actions Int2Actions(int action)
        {
 
            Actions act = new Actions(Vector2.UnitX);
            if (action == 1)
                act.MoveDirection = Vector2.UnitX;
            if (action == 2)
                act.MoveDirection = Vector2.UnitY;
            if (action == 3)
                act.MoveDirection = -Vector2.UnitX;
            if (action == 4)
                act.MoveDirection = -Vector2.UnitY;

            return act;        
        }

        /// <summary>
        /// Double-typed overload of Int2Actions with the identical 1..4 mapping,
        /// using exact floating-point equality against the integral values.
        /// </summary>
        /// <remarks>NOTE(review): no caller in this file uses this overload, and
        /// '==' on doubles only matches exactly-integral inputs — candidate for
        /// removal once external callers are confirmed absent.</remarks>
        Actions Int2Actions(double action)
        {

            Actions act = new Actions(Vector2.UnitX);
            if (action == 1)
                act.MoveDirection = Vector2.UnitX;
            if (action == 2)
                act.MoveDirection = Vector2.UnitY;
            if (action == 3)
                act.MoveDirection = -Vector2.UnitX;
            if (action == 4)
                act.MoveDirection = -Vector2.UnitY;

            return act;
        }

        
        /// <summary>
        /// One step of approximate Q-learning: bumps the visit count and updates
        /// the Theta weights for the previous (state, action, reward) triple,
        /// then picks the next action over the exploration-adjusted Q-values and
        /// writes it into <paramref name="actions"/>. Called every frame from
        /// Update (reward 0) and once at game end from GameOver (reward = score).
        /// </summary>
        /// <remarks>
        /// NOTE(review) — several suspect spots, deliberately left untouched:
        /// 1. Each candidate-action block does 'if (expectedvalue >= max) { max =
        ///    expectedvalue; if (expectedvalue == max) ... }' — max is assigned
        ///    FIRST, so the inner equality is always true and the deterministic
        ///    'else { act = aX; }' branches are unreachable; a strictly better
        ///    action is only adopted with probability 1/2.
        /// 2. The weight update passes the whole product into Alpha(), computing
        ///    alpha^(n * f_i * (r + gamma*maxQ)); a conventional update would be
        ///    Alpha(n) * f_i * (r + gamma*maxQ - Q(s,a)). Verify intent.
        /// 3. EvaluateQMaxOveraDash is evaluated on PreviousState, not on the
        ///    successor state as standard Q-learning prescribes.
        /// 4. 'new Random()' per call is time-seeded and correlates across calls.
        /// 5. Actions2Int returns 1..4 while a1..a4 are 0..3 — see Actions2Int.
        /// </remarks>
        void PlayMove(Percepts percepts, Actions actions, int rDash)
        {
            // Retrieve the player state for this player.
            IPlayerStatePercepts CurrentState = percepts.PlayerStates[PlayerIndex];
            // Cache the percepts for the feature helper methods.
            mypercept = percepts;

            // Skip the learning update on the very first move of a game.
            if (PreviousState != null)
            {

                StateActionPair SAPair = new StateActionPair(PreviousState, Actions2Int( PreviousAction) ); 

                // Increment the visit count N(s, a) via remove-then-re-add.
                int value = 0;

                if (StateAction.ContainsKey(SAPair))
                {
                    value = Convert.ToInt32(StateAction[SAPair]);
                    StateAction.Remove(SAPair);
                }

                value++;
                StateAction.Add(SAPair, value);

                // Gradient-style weight update for every feature (see remarks
                // item 2 about the Alpha() parenthesization).
                for (int i = 0; i < NUMBER_OF_FEATURES; i++)
                {
                    // convert action to indexable int
                    /*
                    Theta[i, Actions2Int(PreviousAction)] += Alpha(Convert.ToInt32(StateAction[key]) * 1 *
                        ( 0.95 * EvaluateQMaxOveraDash(PreviousState))
                        );
                    */
                    
                    Theta[i, Actions2Int(PreviousAction)] += Alpha(Convert.ToInt32(StateAction[SAPair]) * ComputeFeature(i, PreviousState) *
                        (PreviousReward + gamma * EvaluateQMaxOveraDash(PreviousState))
                        );
                }
                
            }


            // Remember this step for the next update.
            PreviousState = CurrentState;

            PreviousReward = rDash;
            double max = -1.0;
            int act = a1;
            // One probe key reused (via SetAction) to look up each action's count.
            // Convert.ToInt32(null) yields 0, so unseen pairs count as 0 visits.
            StateActionPair current = new StateActionPair(CurrentState, a1 );

            act = a1;
            max = ExplorationFunction(EvaluateQ(CurrentState, a1), Convert.ToInt32(StateAction[current]));
            Random Rndm = new Random();
            current.SetAction(a2);
            double expectedvalue = ExplorationFunction(EvaluateQ(CurrentState, a2), Convert.ToInt32(StateAction[current]));
            if (expectedvalue >= max)
            {
                // See remarks item 1: the equality below is always true here.
                max = expectedvalue;

                if (expectedvalue == max)
                {
                    if (Rndm.Next(2) == 0)
                        act = a2;

                }
                else
                {
                    act = a2;                
                }

            }

            current.SetAction(a3);
            expectedvalue = ExplorationFunction(EvaluateQ(CurrentState, a3), Convert.ToInt32(StateAction[current]));
            if (expectedvalue >= max)
            {
                max = expectedvalue ;
                if (expectedvalue == max)
                {
                    if (Rndm.Next(2) == 0)
                        act = a3;

                }
                else
                {
                    act = a3;
                }


            }
            
            current.SetAction(a4);
            expectedvalue = ExplorationFunction(EvaluateQ(CurrentState, a4), Convert.ToInt32(StateAction[current]));
            if (expectedvalue >= max)
            {
                
                max = expectedvalue;
                if (expectedvalue == max)
                {
                    if (Rndm.Next(2) == 0)
                        act = a4;

                }
                else
                {
                    act = a4;
                }


            }
            
            // Commit the choice: remember it and expose it to the game loop.
            PreviousAction = Int2Actions( act );
            
            actions.MoveDirection = PreviousAction.MoveDirection;
            
        
        }

        /// <summary>
        /// Per-frame hook: delegates to PlayMove with a step reward of 0;
        /// PlayMove writes the chosen move into 'actions'.
        /// </summary>
        public override void Update(Percepts percepts, Actions actions)
        {
            const int stepReward = 0;
            PlayMove(percepts, actions, stepReward);
        }

    }
}
