﻿using System;
using System.Collections.Generic;
using System.Data;
using AdaptiveAgents.Agents;
using AdaptiveAgents.Loggers;
using AdaptiveAgents.Games;
using AdaptiveAgents.Distributions;

namespace AdaptiveAgents.Experiments
{
    /// <summary>
    /// An experiment that compares different agent configurations (game type, agent
    /// builder and strategy-change point) by running batches of games and dumping
    /// per-game results to semicolon-separated CSV files.
    /// </summary>
    class EpsilonComparisonToCSVExperiment : Experiment
    {
        //Data members
        int GAMES = 2000;   // games per test batch
        int ROUNDS = 300;   // rounds per game

        // Competence / epsilon values of the three agents for the current game.
        // These fields are refreshed at the top of every game iteration.
        double competence0;
        double competence1;
        double competence2;

        double eps0;
        double eps1;
        double eps2;

        //enum to swap between the different agents for different tests
        private enum AgentType { Test1, Test2, Test3, Test4, Test5, Test6, Test7, Test8 };
        private AgentType agentType = AgentType.Test1;

        //delegates used to parameterize the shared game loop (runGames)
        private delegate double[] ValueSource(int gameIndex);
        private delegate double GameRunner(Environment environment, DataRow row);

        /// <summary>
        /// Constructor
        /// </summary>
        public EpsilonComparisonToCSVExperiment()
            : base(50, 3, 1) {}

        /// <summary>
        /// Write a DataTable as a semicolon-separated CSV file ready for opening in Excel.
        /// </summary>
        /// <param name="fileName">Name of the output file</param>
        /// <param name="dt">The DataTable to print</param>
        private void WriteToCSV(String fileName, DataTable dt)
        {
            // 'using' guarantees the stream is flushed and closed even if a write throws
            // (the original leaked the handle on exception)
            using (System.IO.TextWriter writer = new System.IO.StreamWriter(fileName))
            {
                //header line: column names
                for (int i = 0; i < dt.Columns.Count; ++i)
                    writer.Write(dt.Columns[i].ToString() + ";");

                writer.WriteLine();

                //one line per data row
                for (int i = 0; i < dt.Rows.Count; ++i)
                {
                    DataRow row = dt.Rows[i];
                    for (int j = 0; j < dt.Columns.Count; ++j)
                        writer.Write(row[j].ToString() + ";");
                    writer.WriteLine();
                }
            }
        }

        /// <summary>
        /// Set the epsilon values in the DataRow for printing
        /// </summary>
        /// <param name="row">The DataRow that we want to save the data in</param>
        /// <param name="eps0">Epsilon value for agent 0</param>
        /// <param name="eps1">Epsilon value for agent 1</param>
        /// <param name="eps2">Epsilon value for agent 2</param>
        private void setEpsilon(DataRow row, double eps0, double eps1, double eps2)
        {
            row["eps0"] = eps0;
            row["eps1"] = eps1;
            row["eps2"] = eps2;
        }

        /// <summary>
        /// Set the competence values in the DataRow for printing
        /// </summary>
        /// <param name="row">The DataRow that we want to save the data in</param>
        /// <param name="comp0">Competence value for agent 0</param>
        /// <param name="comp1">Competence value for agent 1</param>
        /// <param name="comp2">Competence value for agent 2</param>
        private void setCompetence(DataRow row, double comp0, double comp1, double comp2)
        {
            // BUG FIX: this method previously stored the competence0..2 FIELDS and
            // ignored its comp0..2 parameters; it only worked because every caller
            // happened to assign the fields first. Now it honors the parameters.
            // Column names match createTable exactly (DataTable lookup is
            // case-insensitive, so stored data is unchanged).
            row["Competence0"] = comp0;
            row["Competence1"] = comp1;
            row["Competence2"] = comp2;
        }

        /// <summary>
        /// Create a DataTable with the needed Columns
        /// </summary>
        /// <param name="name">A name for the DataTable</param>
        /// <param name="rounds">Number of rounds (one utility column per round)</param>
        /// <returns>A DataTable ready for the test</returns>
        private DataTable createTable(String name, int rounds)
        {
            DataTable dt = new DataTable(name);

            //fixed columns shared by every test
            String[] fixedColumns =
            {
                "GameNum",
                "Competence0", "Competence1", "Competence2",
                "eps0", "eps1", "eps2",
                "utility"
            };
            foreach (String columnName in fixedColumns)
                dt.Columns.Add(new DataColumn(columnName, typeof(double)));

            //Add a column for each round
            for (int i = 0; i < rounds; ++i)
                dt.Columns.Add(new DataColumn("utility " + i.ToString(), typeof(double)));

            return dt;
        }

        /// <summary>
        /// Push the current competence fields into the three agents.
        /// Kept as a separate step so callers can preserve the original ordering:
        /// game created first, competences assigned afterwards.
        /// </summary>
        /// <param name="environment">Environment holding the three agents</param>
        private void applyCompetences(Environment environment)
        {
            environment.agents[0].Competence = new Blocks(competence0);
            environment.agents[1].Competence = new Blocks(competence1);
            environment.agents[2].Competence = new Blocks(competence2);
        }

        /// <summary>
        /// Create and run the classic MDP game (shared by tests 1, 6, 7 and 8).
        /// </summary>
        /// <param name="environment">Environment with the agents already added</param>
        /// <param name="row">Row handed to the game so it can record per-round data</param>
        /// <param name="rounds">Number of rounds for the game</param>
        /// <returns>The utility returned by the game</returns>
        private double playOldMdp(Environment environment, DataRow row, int rounds)
        {
            MDPGame_old game = (MDPGame_old)GameFactory.create(Games.GameType.MDP, environment, rounds);
            applyCompetences(environment);
            game.Dr1 = row;
            return game.start(_logger);
        }

        /// <summary>
        /// Shared game loop used by every runTestN method. Runs the given number of
        /// games, fills one DataRow per game and writes the table to a CSV file.
        /// </summary>
        /// <param name="fileName">Output CSV file name</param>
        /// <param name="tableName">Name for the DataTable</param>
        /// <param name="progressLabel">Label printed to the console every 100 games</param>
        /// <param name="games">Number of games to run</param>
        /// <param name="rounds">Rounds per game</param>
        /// <param name="strategyPoint">ChangeStrategyPoint assigned to agent 0</param>
        /// <param name="valueSource">Returns {eps0,eps1,eps2,comp0,comp1,comp2} for game i</param>
        /// <param name="playGame">Creates the concrete game, attaches the row and runs it</param>
        private void runGames(String fileName, String tableName, String progressLabel,
                              int games, int rounds, int strategyPoint,
                              ValueSource valueSource, GameRunner playGame)
        {
            DataTable dt = createTable(tableName, rounds);

            //Run games
            for (int i = 0; i < games; ++i)
            {
                //Print to screen every 100 games
                if (i % 100 == 0)
                    System.Console.WriteLine(i + " from " + progressLabel);

                //Create new row for each game
                DataRow row = dt.NewRow();
                row["GameNum"] = i;

                double[] v = valueSource(i);

                //epsilons
                eps0 = v[0];
                eps1 = v[1];
                eps2 = v[2];
                setEpsilon(row, eps0, eps1, eps2);

                //competences
                competence0 = v[3];
                competence1 = v[4];
                competence2 = v[5];
                setCompetence(row, competence0, competence1, competence2);

                // first let's set the environment
                Environment environment = new Environment();

                //Create a new population of players (builder chosen via agentType)
                List<Agent> agents = generatePlayers(environment, eps0, eps1, eps2, _logger);
                for (int j = 0; j < NumAgents; j++)
                    environment.addAgent(agents[j]);

                //when the strategic agent switches away from random play
                environment.agents[0].ChangeStrategyPoint = strategyPoint;

                //create and run the game; the game writes per-round data into 'row'
                row["utility"] = playGame(environment, row);

                dt.Rows.Add(row);
            }

            WriteToCSV(fileName, dt);
        }

        /// <summary>
        /// Test 1: only the first choice is random (ChangeStrategyPoint fixed at 2;
        /// the 'random' argument only appears in the output file name).
        /// </summary>
        private void runTest1(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test1;
            runGames("FirstRandom_" + random + "_" + stamp + ".txt",
                     "OnlyFirstRandom", "1st test",
                     GAMES, ROUNDS, 2,
                     delegate(int i) { return values[i]; },
                     delegate(Environment env, DataRow row) { return playOldMdp(env, row, ROUNDS); });
        }

        /// <summary>
        /// Test 2: no raise competence + only the first 10 rounds are random.
        /// </summary>
        private void runTest2(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test2;
            runGames("NoRaise10Randoms_" + random + "_" + stamp + ".txt",
                     "NoRaise10Random", "2nd test",
                     GAMES, ROUNDS, 10,
                     delegate(int i) { return values[i]; },
                     delegate(Environment env, DataRow row)
                     {
                         MDPNoRaiseCompetenceGame game = (MDPNoRaiseCompetenceGame)GameFactory.create(Games.GameType.MDPNoRaiseCompetence, env, ROUNDS);
                         applyCompetences(env);
                         game.Dr1 = row;
                         return game.start(_logger);
                     });
        }

        /// <summary>
        /// Test 4: use a different type of choosing for the adaptive agent
        /// (MDPDifferentBehaviorBuilder, see generatePlayers).
        /// </summary>
        private void runTest4(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test4;
            // BUG FIX: the file name previously embedded 'random' twice
            // ("DifferentBehavior_r_r_stamp"); now consistent with the other tests.
            runGames("DifferentBehavior_" + random + "_" + stamp + ".txt",
                     "DifferentBehavior", "4th test",
                     GAMES, ROUNDS, random,
                     delegate(int i) { return values[i]; },
                     delegate(Environment env, DataRow row)
                     {
                         MDPGameNoRaise game = (MDPGameNoRaise)GameFactory.create(Games.GameType.MDPNoRaise, env, ROUNDS);
                         applyCompetences(env);
                         game.Dr1 = row;
                         return game.start(_logger);
                     });
        }

        /// <summary>
        /// Test 5: use a different game (MDPFindDifferentGame) that inserts a new
        /// kind of TeammateModel into the adaptive agent.
        /// </summary>
        private void runTest5(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test5;
            runGames("MDPFindDifferent_" + random + "_" + stamp + ".txt",
                     "Different MDP Choice", "5th test",
                     GAMES, ROUNDS, random,
                     delegate(int i) { return values[i]; },
                     delegate(Environment env, DataRow row)
                     {
                         MDPFindDifferentGame game = (MDPFindDifferentGame)GameFactory.create(Games.GameType.MDPFindDifferent, env, ROUNDS);
                         applyCompetences(env);
                         game.Dr1 = row;
                         return game.start(_logger);
                     });
        }

        /// <summary>
        /// Test 6: run the original MDP agent with the original setting.
        /// </summary>
        private void runTest6(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test6;
            runGames("OriginalMDPExperiment_" + random + "_" + stamp + ".txt",
                     "OriginalMDPExperiment", "6th test",
                     GAMES, ROUNDS, random,
                     delegate(int i) { return values[i]; },
                     delegate(Environment env, DataRow row) { return playOldMdp(env, row, ROUNDS); });
        }

        /// <summary>
        /// Test 7: use a Greedy agent as the adaptive agent.
        /// </summary>
        private void runTest7(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test7;
            // BUG FIX: progress message used to say "from 6th test" (copy/paste error)
            runGames("Greedy_" + random + "_" + stamp + ".txt",
                     "Greedy", "7th test",
                     GAMES, ROUNDS, random,
                     delegate(int i) { return values[i]; },
                     delegate(Environment env, DataRow row) { return playOldMdp(env, row, ROUNDS); });
        }

        /// <summary>
        /// Test 8: a large run of the original experiment with fresh random values
        /// for epsilon and competence on every game. The 'values' parameter is
        /// ignored (callers pass null); kept for signature symmetry with the others.
        /// </summary>
        private void runTest8(int random, String stamp, List<double[]> values)
        {
            agentType = AgentType.Test8;

            int rounds = 2000;
            int games = 50000;

            runGames("LargeOldMDP_" + random + "_" + stamp + ".txt",
                     "LargeOldMDP", "8th test",
                     games, rounds, random,
                     delegate(int i)
                     {
                         // draw order matches the original: eps0..2 then comp0..2
                         return new double[]
                         {
                             Generator.getNextDouble(), //eps0
                             Generator.getNextDouble(), //eps1
                             Generator.getNextDouble(), //eps2
                             Generator.getNextDouble(), //comp0
                             Generator.getNextDouble(), //comp1
                             Generator.getNextDouble()  //comp2
                         };
                     },
                     delegate(Environment env, DataRow row) { return playOldMdp(env, row, rounds); });
        }

        /// <summary>
        /// Run the experiments: a shared set of epsilon/competence vectors is drawn
        /// once so every test battery sees identical inputs, then tests 1-7 are run
        /// for several strategy-change points, followed by the large test 8.
        /// </summary>
        public override void runExperiment()
        {
            //Generate random values for epsilons and competences, shared by all tests
            List<double[]> values = new List<double[]>(GAMES);
            for (int i = 0; i < GAMES; ++i)
            {
                double[] vec = new double[6];

                vec[0] = 0;                                     //eps0
                vec[1] = Generator.getNextDouble() * 0.2;       //eps1
                vec[2] = Generator.getNextDouble() * 0.1 + 0.9; //eps2
                vec[3] = Generator.getNextDouble() * 0.1;       //comp0
                vec[4] = Generator.getNextDouble() * 0.2 + 0.8; //comp1
                vec[5] = Generator.getNextDouble() * 0.1 + 0.9; //comp2

                values.Add(vec);
            }

            String stamp = DateTime.Now.ToString("yyyy.MM.dd-HH.mm.ss");

            //run the set of tests with different times when they change strategy from random
            int[] strategyPoints = { 1, 2, 5, 10, 20 };
            foreach (int random in strategyPoints)
            {
                runTest1(random, stamp, values);
                runTest2(random, stamp, values);
                runTest4(random, stamp, values);
                runTest5(random, stamp, values);
                runTest6(random, stamp, values);
                runTest7(random, stamp, values);
            }

            runTest8(10, stamp, null);
        }

        /// <summary>
        /// Create a set of 3 agents: agent 0 is built according to the current
        /// agentType; agents 1 and 2 are plain "normal" agents.
        /// </summary>
        /// <param name="env">The environment of the agents</param>
        /// <param name="epsilon0">Epsilon value for agent 0</param>
        /// <param name="epsilon1">Epsilon value for agent 1</param>
        /// <param name="epsilon2">Epsilon value for agent 2</param>
        /// <param name="logger">Logger</param>
        /// <returns>A list of 3 new agents for the game</returns>
        protected List<Agent> generatePlayers(Environment env, double epsilon0, double epsilon1, double epsilon2, Logger logger)
        {
            int numOfAgents = 3;
            List<Agent> retList = new List<Agent>(numOfAgents);

            AgentDirector director = new AgentDirector();

            //choose agent 0's builder according to the AgentType
            switch (agentType)
            {
                case AgentType.Test4:
                    director.Builder = new MDPDifferentBehaviorBuilder();
                    break;
                case AgentType.Test7:
                    director.Builder = new GreedyBuilder();
                    break;
                default:
                    //Test1/2/3/5/6/8 all use the standard discover builder
                    director.Builder = new MDPDiscoverBuilder();
                    break;
            }

            //create the new agents
            director.construct(0, env, epsilon0, logger);
            retList.Add(director.getAgent());

            director.Builder = new Agents.NormalBuilder();
            director.construct(1, env, epsilon1, logger);
            retList.Add(director.getAgent());

            director.Builder = new Agents.NormalBuilder();
            director.construct(2, env, epsilon2, logger);
            retList.Add(director.getAgent());

            return retList;
        }
    }
}
