﻿using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using AdaptiveAgents.Loggers;
using AdaptiveAgents.Agents;
using AdaptiveAgents.TeammateModels;

namespace AdaptiveAgents.Games
{
    /// <summary>
    /// Represents a game that is played by a group of agents for an Epsilon test
    /// </summary>
    /// <summary>
    /// Represents a game that is played by a group of agents for an Epsilon test.
    /// Per round it records the active player and its utility, and tracks the
    /// distance between the real epsilon of agents 1 and 2 and the epsilon
    /// estimated for them by agent 0's adaptive teammate model, writing the
    /// results into the DataRows exposed by this class.
    /// </summary>
    class EpsilonTestGame : Game
    {
        /// <summary>
        /// Row recording the epsilon-estimation error for agent 1; its column
        /// "eps1" is expected to hold the agent's real epsilon.
        /// </summary>
        public DataRow Dr1 { get; set; }

        /// <summary>
        /// Row recording the epsilon-estimation error for agent 2; its column
        /// "eps2" is expected to hold the agent's real epsilon.
        /// </summary>
        public DataRow Dr2 { get; set; }

        /// <summary>
        /// Row recording which player was active in each round
        /// (columns are named after the round numbers).
        /// </summary>
        public DataRow PlayerRow { get; set; }

        /// <summary>
        /// Row recording the utility produced in each round
        /// (columns are named after the round numbers).
        /// </summary>
        public DataRow Util { get; set; }

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="env">Environment that contains the players</param>
        /// <param name="numOfRounds">Number of game rounds</param>
        public EpsilonTestGame(Environment env, int numOfRounds)
            : base(env, numOfRounds)
        {
        }

        /// <summary>
        /// Decides which player is the next performer by delegating the
        /// decision to the currently active agent.
        /// </summary>
        /// <param name="activePlayer">Index of the currently active player</param>
        /// <returns>ID of the next player</returns>
        protected virtual int chooseNextPerformer(int activePlayer)
        {
            return _environment.agents[activePlayer].chooseNextPerformer();
        }

        /// <summary>
        /// Initializes the game environment and logs the game details:
        /// every agent gets its teammate models, agent 0 is made adaptive,
        /// and the player count and agent attributes are logged.
        /// </summary>
        /// <param name="logger">Logger for game details</param>
        /// <param name="numOfAgents">Number of players in the game</param>
        protected void initializeGame(Logger logger, int numOfAgents)
        {
            // Initialize the teammate models of every agent
            foreach (Agent agent in _environment.agents)
                agent.initializeTeammateModels(numOfAgents);

            // Make agent 0 the adaptive agent by giving it an adaptive teammate
            // model for every agent in the environment (including itself)
            List<TeammateModel> teammateModels = new List<TeammateModel>();
            for (int i = 0; i < _environment.agents.Count; i++)
            {
                Agent ag = _environment.agents[i];
                teammateModels.Add(new AdaptiveTeammateModel_old(_environment.agents[0], ag, _environment));
            }
            _environment.agents[0].TeammateModels = teammateModels;

            // Log: number of agents in the environment
            logger.WriteLine(MessageType.PlayersCount, numOfAgents);

            // Log: attributes of each agent
            foreach (Agent agent in _environment.agents)
            {
                logger.WriteLine(MessageType.AgentAttributes, agent.Epsilon, agent.Competence);
                // TODO: log the distributions of the agents
            }

            // Log: separator line
            logger.WriteLine(MessageType.LineSeperator, "");
        }

        /// <summary>
        /// Makes a move by the current active player: records its performance
        /// in the turn record and in the output DataRows, lets every agent
        /// observe the move, and re-estimates the performers' competences.
        /// </summary>
        /// <param name="round">The round that the current active player plays in</param>
        /// <param name="activePlayer">Current active player</param>
        /// <param name="turn">Record used to keep track of the turn data</param>
        /// <param name="accumUtility">Total utility accumulated up to the current turn</param>
        protected void makeMove(int round, int activePlayer, Turn turn, ref double accumUtility)
        {
            // Get the performance of the active player
            double performance = _environment.agents[activePlayer].performNow();

            // Record who the active player was and accumulate its performance
            turn.ActivePlayer = activePlayer;
            accumUtility += performance;

            // Persist the active player and its utility for this round
            // (the round number is the column name)
            PlayerRow[round.ToString()] = activePlayer;
            Util[round.ToString()] = performance;

            // Active player performance (property keeps its original spelling)
            turn.Performence = performance;

            // Every agent adds a new observation of the current turn
            foreach (Agent agent in _environment.agents)
                agent.addObservation(new Observation(round, _environment.agents[activePlayer].ID, performance));

            // Estimate the competence of each performer: for teammate model i we
            // ask the cyclically-next agent about its model with index i
            for (int i = 0; i < _environment.agents.Count; ++i)
            {
                int otherAgent = (i + 1) % _environment.agents.Count; // ID of an agent other than the current one
                turn.addEstimatedCompetence(_environment.agents[otherAgent].getTeammateModel(i).EstimateCompetence());
            }
        }

        /// <summary>
        /// Starts playing the game: runs all rounds, and whenever agent 1 or 2
        /// was the performer, records |real epsilon - estimated epsilon| (real
        /// values read from columns "eps1"/"eps2" of Dr1/Dr2) as estimated by
        /// agent 0.
        /// NOTE(review): the epsilon tracking assumes at least 3 agents
        /// (indices 0..2) and that Dr1/Dr2/PlayerRow/Util were assigned before
        /// this call — confirm with callers.
        /// </summary>
        /// <param name="logger">Logger that monitors game activity</param>
        /// <returns>Average utility of the players</returns>
        public virtual double start(Logger logger)
        {
            int numOfAgents = _environment.agents.Count;

            logger.WriteLine(MessageType.GameDetails, numOfAgents, GameType.Normal.ToString());

            // Initialization of the game environment
            initializeGame(logger, numOfAgents);

            _accumUtility = 0; // Total utility of all agents in the group
            int activePlayer = Generator.getNextInt(numOfAgents); // Choose the first agent to play

            // Number of epsilon-error samples written so far for agents 1 and 2
            // (also used as the column name of the next sample)
            int count1 = 0, count2 = 0;

            // Play the game rounds
            for (int round = 0; round < _numOfRounds; ++round)
            {
                // Create a turn record
                Turn turn = new Turn(numOfAgents);

                // Make a move by the active player
                makeMove(round, activePlayer, turn, ref _accumUtility);

                // Choose the next performer
                int nextActivePlayer = chooseNextPerformer(activePlayer);

                // Update the last observation of each player with the next player
                foreach (Agent agent in _environment.agents)
                {
                    // Update the last observation and check whether the agent acted as expected
                    bool actExpected = agent.updateLastObservationWithNextAgent(activePlayer, nextActivePlayer);
                    turn.addExpectedAct(actExpected);
                }

                // Epsilon of agents 1 and 2 as estimated by agent 0's adaptive
                // teammate models (index [0] of the estimate is the epsilon)
                double eps1 = ((AdaptiveTeammateModel_old)_environment.agents[0].getTeammateModel(1)).estimateEpsilonAndCometence(_environment.agents[0].ObservationsSet.observations)[0];
                double eps2 = ((AdaptiveTeammateModel_old)_environment.agents[0].getTeammateModel(2)).estimateEpsilonAndCometence(_environment.agents[0].ObservationsSet.observations)[0];

                // Save the distance between the real and the estimated epsilon
                // of the agent that just performed (only tracked for 1 and 2)
                if (activePlayer == 1)
                {
                    Dr1[count1.ToString()] = Math.Abs(Double.Parse(Dr1["eps1"].ToString()) - eps1);
                    ++count1;
                }
                else if (activePlayer == 2)
                {
                    Dr2[count2.ToString()] = Math.Abs(Double.Parse(Dr2["eps2"].ToString()) - eps2);
                    ++count2;
                }

                // The chosen performer becomes the next round's active player
                activePlayer = nextActivePlayer;

                // Log: turn data
                logger.WriteLine(MessageType.Turn, turn);
            }

            /***** Logging the agents' summaries *****/

            return logStatistics(logger);
        }

        /// <summary>
        /// Logs the statistics of the game and calculates the average utility.
        /// </summary>
        /// <param name="logger">Logger for game statistics</param>
        /// <returns>Average utility per round over the whole game</returns>
        protected double logStatistics(Logger logger)
        {
            // Log the observations of each agent
            foreach (Agent agent in _environment.agents)
            {
                logger.WriteLine(MessageType.ObservationsHeader, agent.ID);
                // NOTE(review): magic observation range (1, 50) — confirm the intended window
                agent.ObservationsSet.printObservations(1, 50);
            }

            // Calculate the average utility per round
            double avgUtility = _accumUtility / _numOfRounds;

            // Log the number of actions of each agent
            foreach (Agent agent in _environment.agents)
            {
                logger.WriteLine(MessageType.NumActions, agent.NumActions);
            }

            // Log a separator line
            logger.WriteLine(MessageType.LineSeperator, "");

            return avgUtility; // Return the calculated average utility
        }
    }
}
