﻿using System;
using System.Collections.Generic;
using AdaptiveAgents.Loggers;
using AdaptiveAgents.Agents;
using AdaptiveAgents.Games;
using AdaptiveAgents.Distributions;

namespace AdaptiveAgents.Experiments
{
    public class NormalExperiment : Experiment
    {
        // Path to the main directory of the project (relative to the build output).
        // NOTE(review): "//" happens to work as a separator on Windows, but
        // Path.Combine would be more portable — confirm before changing.
        private const string PATH = "..//..//";

        /// <summary>
        /// Creates a normal experiment. The base-class arguments are presumably
        /// (numGames, numAgents, numRounds) = (50, 3, 50) — TODO confirm against
        /// the Experiment base constructor, which is not visible in this file.
        /// </summary>
        public NormalExperiment()
            : base(50, 3, 50) {}

        /// <summary>
        /// Runs the configured number of games. For each game a fresh environment
        /// and agent population are created, each agent is assigned a competence
        /// distribution, and the game's resulting utility is accumulated. After all
        /// games the average utility and the sample standard deviation are written
        /// to the logger.
        /// </summary>
        public override void runExperiment()
        {
            //for each game...
            for (int gameNum = 0; gameNum < numGames; gameNum++)
            {
                // Fresh environment and player population for every game.
                Environment environment = new Environment();
                List<Agent> agents = generatePlayers(NumAgents, environment, defaultEpsilon, _logger);

                // Give every agent a competence distribution and register it.
                for (int i = 0; i < NumAgents; i++)
                {
                    Agent agent = agents[i];

                    IDistribution distribution;
                    if (AdaptiveAgents.readDistributions || gameNum > 0)
                    {
                        // Re-read the persisted distribution so every game after
                        // the first uses the same competences.
                        // NOTE(review): when readDistributions is false, game 0
                        // never writes these files (the write below is commented
                        // out), so games 1..N may read stale or missing files —
                        // verify this is intentional.
                        distribution = new Blocks(PATH + "Distributions/distribution." + i);
                    }
                    else
                    {
                        // First game with no persisted data: generate fresh random values.
                        distribution = new Blocks();
                        distribution.generateRandomValues();
                        //distribution.write(PATH + "Distributions/distribution." + i);
                    }

                    //give the distribution to the agent
                    agent.Competence = distribution;
                    environment.addAgent(agent);
                }

                // Log the population, then play one full game.
                environment.printAgents();
                Game game = GameFactory.create(Games.GameType.Normal, environment, numRounds);

                //run the game and save the utility coming back
                double utility = game.start(_logger);

                // Accumulate sum and sum of squares for the statistics below.
                utilitySum += utility;
                utilitySquaredSum += utility * utility;
            }

            //get average utility
            double avgUtility = utilitySum / numGames;

            // Sample variance via the sum / sum-of-squares identity:
            //   var = (n * sum(x^2) - sum(x)^2) / (n * (n - 1))
            // which is undefined for a single game, hence the guard.
            double variance = 0, stdDev = 0;
            if (numGames > 1)
            {
                variance = (numGames * utilitySquaredSum - utilitySum * utilitySum)
                        / (numGames * (numGames - 1));
                stdDev = Math.Sqrt(variance);
            }

            //write data to logger
            AdaptiveAgents.logger.WriteLine(MessageType.SummerizeGameResults,
                numGames, numRounds, avgUtility, stdDev);
        }
    }
}
