﻿using System;
using System.Collections.Generic;
using System.Text;
using AdaptiveAgents.Agents;
using AdaptiveAgents.Games;
using AdaptiveAgents.Distributions;
using AdaptiveAgents.Loggers;
using System.Data;
using AdaptiveAgents.Results;

namespace AdaptiveAgents.Experiments
{
    /// <summary>
    /// The basic experiment to find out what is the most efficient epsilon to use.
    /// Plays a batch of games per output file and records per-game / per-player
    /// statistics into a CSV file.
    /// </summary>
    class PDPExperiment : Experiment
    {
        // base(50, 3, 1): values forwarded to the Experiment base class —
        // presumably (iterations, numAgents, ...); TODO confirm against base.
        public PDPExperiment() : base(50, 3, 1) { }

        // Results of the most recently finished game (filled via out param of game.start).
        private ExperimentResults results;

        // The kind of agent player 0 is built as. generatePlayers cycles
        // Greedy -> MDPDiscovery -> MDPOptimal -> Greedy; PDP stays PDP.
        private enum AgentType { Greedy, MDPDiscovery, MDPOptimal, PDP };
        private AgentType agentType = AgentType.PDP; //init to PDP!

        // Per-player competence levels; the higher, the better the player.
        double competence0;
        double competence1;
        double competence2;

        // Per-player epsilon values under test.
        double eps0;
        double eps1;
        double eps2;

        private DataTable dtNormal; //A DataTable for a normal run

        /// <summary>
        /// Writes a DataTable as a CSV file ready for opening in Excel.
        /// NOTE: cells are not quoted/escaped and every line ends with a
        /// trailing comma — kept for backward compatibility with existing sheets.
        /// </summary>
        /// <param name="fileName">Name of the file</param>
        /// <param name="dt">The data table to print</param>
        private void WriteToCSV(String fileName, DataTable dt)
        {
            // "using" guarantees the file handle is released even if a
            // write throws mid-way (the original leaked the writer on exceptions).
            using (System.IO.TextWriter writer = new System.IO.StreamWriter(fileName))
            {
                //print column names
                for (int i = 0; i < dt.Columns.Count; ++i)
                    writer.Write(dt.Columns[i].ToString() + ",");

                writer.WriteLine();

                //for each row...
                for (int i = 0; i < dt.Rows.Count; ++i)
                {
                    DataRow row = dt.Rows[i];
                    //for each cell in the row...
                    for (int j = 0; j < dt.Columns.Count; ++j)
                        writer.Write(row[j].ToString() + ",");
                    writer.WriteLine();
                }
            }
        }

        /// <summary>
        /// Builds the per-game result table with all of its columns, in the
        /// exact order they appear in the output CSV.
        /// </summary>
        private DataTable BuildResultTable()
        {
            DataTable dt = new DataTable("Normal_Data_Table");

            // First group of double-typed columns.
            String[] headDoubles =
            {
                "GameNum", "Competence0", "Competence1", "Competence2",
                "eps0", "eps1", "eps2", "avgUtilityOfTheGame"
            };
            foreach (String name in headDoubles)
                dt.Columns.Add(new DataColumn(name, typeof(double)));

            // Turn counters are integers.
            String[] intColumns =
            {
                "Player0NumOfTurnsPlayed", "Player1NumOfTurnsPlayed", "Player2NumOfTurnsPlayed"
            };
            foreach (String name in intColumns)
                dt.Columns.Add(new DataColumn(name, typeof(int)));

            // Accumulated and average utilities are doubles.
            String[] tailDoubles =
            {
                "Player0AccumUtility", "Player1AccumUtility", "Player2AccumUtility",
                "Player0AvgUtility", "Player1AvgUtility", "Player2AvgUtility"
            };
            foreach (String name in tailDoubles)
                dt.Columns.Add(new DataColumn(name, typeof(double)));

            return dt;
        }

        /// <summary>
        /// Draws a random value rounded to one decimal place, normalized via
        /// CheckNumIsBetweenValidRange, re-drawing until it differs from every
        /// value in <paramref name="taken"/>.
        /// </summary>
        /// <param name="rand">Random source to draw from</param>
        /// <param name="taken">Values the result must not collide with</param>
        private double DrawDistinct(Random rand, params double[] taken)
        {
            double value;
            bool collision;
            do
            {
                value = Math.Round(rand.NextDouble(), 1);
                CheckNumIsBetweenValidRange(ref value);

                collision = false;
                foreach (double t in taken)
                {
                    if (value == t)
                    {
                        collision = true;
                        break;
                    }
                }
            } while (collision);
            return value;
        }

        /// <summary>
        /// Runs the main experiment: for each output file, draws distinct
        /// epsilon/competence values per player, plays GAMES games of ROUNDS
        /// rounds each, and dumps one CSV row per game.
        /// </summary>
        public override void runExperiment()
        {
            results = new ExperimentResults();

            // One time-seeded generator for the whole experiment. The original
            // constructed a new Random per outer iteration, which can reseed
            // with the same tick count and replay identical draws.
            Random rand = new Random();

            int numberOfDifferentGamesOnDifferentCSV = 1;
            for (int counter = 0; counter < numberOfDifferentGamesOnDifferentCSV; counter++)
            {
                int GAMES = 20;   //number of games to run
                int ROUNDS = 300; //number of rounds in each game

                dtNormal = BuildResultTable();

                #region Values for eps and comp are DIFFERENT between agents
                //Get random starting values for epsilon and competence,
                //guaranteed pairwise-distinct per player.
                eps0 = DrawDistinct(rand);
                eps1 = DrawDistinct(rand, eps0);
                eps2 = DrawDistinct(rand, eps0, eps1);

                competence0 = DrawDistinct(rand);
                competence1 = DrawDistinct(rand, competence0);
                competence2 = DrawDistinct(rand, competence0, competence1);
                #endregion

                //run games
                for (int i = 0; i < GAMES; ++i)
                {
                    //write to console to see progress
                    if (i % 100 == 0)
                        System.Console.WriteLine(i);

                    for (int k = 0; k < 1; k++)
                    {
                        //set a new row for the game
                        DataRow row1 = dtNormal.NewRow();

                        //enter game number
                        row1["GameNum"] = i;

                        //save the epsilon values in the DataRow
                        row1["eps0"] = eps0;
                        row1["eps1"] = eps1;
                        row1["eps2"] = eps2;

                        //save the competence values in the DataRow
                        //(DataColumnCollection lookup is case-insensitive)
                        row1["Competence0"] = competence0;
                        row1["Competence1"] = competence1;
                        row1["Competence2"] = competence2;

                        //Create a new environment for the experiment
                        Environment environment = new Environment();

                        //Create a list of 3 agents and register them
                        List<Agent> agents = generatePlayers(environment, eps0, eps1, eps2, AdaptiveAgents.logger);
                        for (int j = 0; j < numAgents; j++)
                            environment.addAgent(agents[j]);

                        //Create a new normal game that writes its data to CSV
                        NormalGameToCSV game = (NormalGameToCSV)GameFactory.create(Games.GameType.NormalToCsv, environment, ROUNDS);

                        //Set the competence level for each agent
                        environment.agents[0].Competence = new Blocks(competence0);
                        environment.agents[1].Competence = new Blocks(competence1);
                        environment.agents[2].Competence = new Blocks(competence2);

                        //hand the game the row it writes its game data into
                        game.Dr1 = row1;

                        //Start the game
                        double utility = game.start(AdaptiveAgents.logger, out results, k);

                        row1["avgUtilityOfTheGame"] = utility;
                        row1["Player0NumOfTurnsPlayed"] = results.numberOfTurnsPlayer0Played;
                        row1["Player1NumOfTurnsPlayed"] = results.numberOfTurnsPlayer1Played;
                        row1["Player2NumOfTurnsPlayed"] = results.numberOfTurnsPlayer2Played;

                        row1["Player0AccumUtility"] = results.player0AccumilatingProfit;
                        row1["Player1AccumUtility"] = results.player1AccumilatingProfit;
                        row1["Player2AccumUtility"] = results.player2AccumilatingProfit;

                        //average utility per turn played; report 0 for a player
                        //that never got a turn (the original divided by zero here)
                        row1["Player0AvgUtility"] = results.numberOfTurnsPlayer0Played == 0
                            ? 0.0 : results.player0AccumilatingProfit / results.numberOfTurnsPlayer0Played;
                        row1["Player1AvgUtility"] = results.numberOfTurnsPlayer1Played == 0
                            ? 0.0 : results.player1AccumilatingProfit / results.numberOfTurnsPlayer1Played;
                        row1["Player2AvgUtility"] = results.numberOfTurnsPlayer2Played == 0
                            ? 0.0 : results.player2AccumilatingProfit / results.numberOfTurnsPlayer2Played;

                        dtNormal.Rows.Add(row1);
                    }
                }

                //create a timestamped file name and write the table out
                String stamp = DateTime.Now.ToString("yyyy.MM.dd-HH.mm.ss");
                WriteToCSV("PDPExp_" + counter + "_" + stamp + ".csv", dtNormal);
            }
        }

        /// <summary>
        /// Generate the agents for the game: player 0 is built according to
        /// the current agentType (which then advances through the cycle for
        /// non-PDP types); players 1 and 2 are built as normal agents.
        /// </summary>
        /// <param name="env">the Environment of the game</param>
        /// <param name="epsilon0">epsilon for agent 0</param>
        /// <param name="epsilon1">epsilon for agent 1</param>
        /// <param name="epsilon2">epsilon for agent 2</param>
        /// <param name="logger">Logger for logging the data</param>
        /// <returns>a list of agents for the game</returns>
        protected List<Agent> generatePlayers(Environment env, double epsilon0, double epsilon1, double epsilon2, Logger logger)
        {
            int numOfAgents = 3;
            //create agents list
            List<Agent> retList = new List<Agent>(numOfAgents);
            //create director
            AgentDirector director = new AgentDirector();

            //data passed to the optimal/PDP agent builders: the opponents'
            //competences and epsilons. NOTE(review): these read the eps1/eps2
            //fields rather than the epsilon1/epsilon2 parameters — identical
            //in the current caller, but confirm before reusing elsewhere.
            double[] compArr = { competence1, competence2 };
            double[] epsArr = { eps1, eps2 };

            switch (agentType)
            {
                case AgentType.Greedy:
                    director.Builder = new Agents.GreedyBuilder(); //create builder
                    agentType = AgentType.MDPDiscovery; //set agent type for next time
                    break;
                case AgentType.MDPDiscovery:
                    director.Builder = new Agents.MDPDiscoverBuilder(); //create builder
                    agentType = AgentType.MDPOptimal; //set agent type for next time
                    break;
                case AgentType.PDP:
                    director.Builder = new Agents.PDPOptimalBuilder(compArr, epsArr); //create builder
                    agentType = AgentType.PDP; //PDP stays PDP for next time
                    break;
                case AgentType.MDPOptimal:
                    director.Builder = new Agents.MDPOptimalBuilder(compArr, epsArr); //create builder
                    agentType = AgentType.Greedy; //set agent type for next time
                    break;
            }

            //construct the first agent according to the director
            director.construct(0, env, epsilon0, logger);
            Agent ag = director.getAgent();
            ag.ChangeStrategyPoint = 10;
            retList.Add(ag);

            //construct the 2nd and 3rd agents as normal agents
            director.Builder = new Agents.NormalBuilder();
            director.construct(1, env, epsilon1, logger);
            retList.Add(director.getAgent());

            director.Builder = new Agents.NormalBuilder();
            director.construct(2, env, epsilon2, logger);
            retList.Add(director.getAgent());

            return retList;
        }
    }
}
