package ui.panels.commands;

import java.util.ArrayList;
import java.util.HashMap;

import javax.swing.text.html.HTMLDocument.HTMLReader.IsindexAction;

import org.apache.log4j.Logger;
import org.omg.CORBA.Environment;

import gameEngine.agents.AdaptiveAgent;
import gameEngine.agents.Agent;
import gameEngine.agents.BasicAgent;
import gameEngine.agents.LoggedAgent;
import gameEngine.agents.choosingStrategy.ChoosingStrategy;
import gameEngine.agents.choosingStrategy.StrategyFactory;
import gameEngine.agents.choosingStrategy.EnumOfStrategy.Strategy_Type;
import gameEngine.agents.choosingStrategy.representation.ManuallAdaptiveRepresentation;
import gameEngine.agents.choosingStrategy.representation.StrategyRepresentation;
import gameEngine.agents.distrebution.Distribution;
import gameEngine.agents.distrebution.DistributionFactory;
import gameEngine.agents.distrebution.EnumOfDistribution.Distribution_Type;
import gameEngine.agents.distrebution.representation.DistributionRepresentation;
import gameEngine.agents.observationSystem.Observation;
import gameEngine.agents.observationSystem.ObservationFactory;
import gameEngine.agents.observationSystem.EnumOfObservation.Observation_Type;
import gameEngine.agents.observationSystem.analyzer.AnalayzerFactory;
import gameEngine.agents.observationSystem.analyzer.ObservationAnalyzer;
import gameEngine.agents.observationSystem.analyzer.EnumOfAnalyzer.Analyzer_Type;
import gameEngine.agents.teamMateModel.InitTeamMateModel;
import gameEngine.agents.teamMateModel.TeamMateModel;
import gameEngine.agents.teamMateModel.TeamMateModelFactory;
import gameEngine.agents.teamMateModel.EnumOfTeamateModel.Teammate_Meodel_Type;
import gameEngine.experiments.Experiment;
import gameEngine.experiments.ExperimentFactory;
import gameEngine.experiments.ExperimentImpl;
import gameEngine.game.TurnBasedScoredGame;

import Data.HashMapesOfParameters;
import ui.flowManager.FlowManagerImpl;
import ui.flowManager.expirimentProperties.AdditionalSettings;
import ui.flowManager.expirimentProperties.AgentRepresentation;
import ui.flowManager.expirimentProperties.ExperimentProperties;
import ui.flowManager.expirimentProperties.GameRepresentation;
import ui.flowManager.expirimentProperties.ExperimentProperties.Agent_Group;

/**
 * Command that builds and runs an experiment from the user's choices.
 *
 * @author vladimir &amp;&amp; alon
 */
public class RunExperimentCommand implements Command{

	/** User-configured experiment settings; fetched from the flow manager at execute() time. */
	private ExperimentProperties 					props = null;

	// Lookup tables translating the UI's display strings into engine enum constants.
	private  HashMap<String, Analyzer_Type> 		analyzerMap = null;
	private  HashMap<String, Teammate_Meodel_Type> 	teammateModelMap = null;
	private  HashMap<String, Observation_Type>  	observationMap = null;

	// Only a single game is currently supported; game indices are 1-based in ExperimentProperties.
	private final int 								gameNumber = 1;
	Logger log = Logger.getLogger(this.getClass());

	/**
	 * CTOR.
	 * Initializes {@link HashMapesOfParameters} and caches its string-to-enum
	 * lookup maps for analyzers, teammate models and observations.
	 */
	public RunExperimentCommand ()
	{
		//first initialize the HashMapesOfParameters
		HashMapesOfParameters.initializeHashMap();

		analyzerMap = HashMapesOfParameters.getAnalyzerMap();
		teammateModelMap = HashMapesOfParameters.getTeammateModelMap();
		observationMap = HashMapesOfParameters.getObservationMap();
	}

	/**
	 * Builds the studied and control agents from the user's
	 * {@link ExperimentProperties}, wires them into an {@link ExperimentImpl}
	 * together with the configured observation, and runs the experiment.
	 * Does nothing when no properties have been configured yet.
	 */
	public void execute()
	{
		// Take the ExperimentProperties; bail out silently if the user never configured any.
		props = FlowManagerImpl.getInstance().getProps();
		if(props == null)
			return;

		// Create the studied and control players. Control agent ids continue
		// after the studied ids, hence the size() offset.
		ArrayList<Agent> studiedPlayers = createStudiedPlayers(gameNumber);
		ArrayList<Agent> controlPlayers = createControlPlayers(gameNumber, studiedPlayers.size());

		// Create the observation for the game.
		GameRepresentation game = props.getGame(gameNumber);
		String obsName = props.getObservationType(gameNumber);
		Observation_Type observationType = observationMap.get(obsName);
		// NOTE(review): arity is 1 + number of control agents, not the total
		// player count — presumably one slot for the (single) studied agent;
		// confirm against ObservationFactory.
		Observation observation = ObservationFactory.create(observationType, 1 + game.getNumOfPlayers(Agent_Group.CONTROL_AGENT));

		ExperimentImpl experiment = new ExperimentImpl(game.getNumOfRepeats(), game.getNumOfTurns());
		experiment.setObservation(observation);
		experiment.setEnvironment(null);

		// Register the players with the experiment.
		for (Agent control : controlPlayers)
			experiment.addBasicAgent(control);
		for (Agent studied : studiedPlayers)
			experiment.addAdaptiveAgent(studied);

		log.debug("Experiment created, next stage is to run it");
		//run the experiment
		experiment.run();
	}

	/**
	 * Creates and initializes the studied agents of the given game.
	 * Studied agent ids start at 1 (no offset).
	 *
	 * @param gameNumber 1-based index of the game in the experiment properties
	 * @return the fully initialized studied agents
	 */
	private ArrayList<Agent> createStudiedPlayers(int gameNumber)
	{
		return createPlayers(gameNumber, Agent_Group.STUDIED_AGENT, 0);
	}

	/**
	 * Creates and initializes the control agents of the given game.
	 *
	 * @param gameNumber 1-based index of the game in the experiment properties
	 * @param add        id offset so control ids continue after the studied ids
	 * @return the fully initialized control agents
	 */
	private ArrayList<Agent> createControlPlayers(int gameNumber, int add)
	{
		return createPlayers(gameNumber, Agent_Group.CONTROL_AGENT, add);
	}

	/**
	 * Shared worker for {@link #createStudiedPlayers} and
	 * {@link #createControlPlayers} (previously two near-identical copies).
	 *
	 * Two passes are required: first every agent must exist so that the
	 * complete {@code players} list can be handed to each agent's
	 * {@code init(...)} in the second pass.
	 *
	 * @param gameNumber 1-based game index in the experiment properties
	 * @param group      which agent group to read from the properties
	 * @param idOffset   added to the 1-based player index to form the agent id
	 * @return the created and initialized agents, in player order
	 */
	private ArrayList<Agent> createPlayers(int gameNumber, Agent_Group group, int idOffset)
	{
		ArrayList<Agent> players = new ArrayList<Agent>();

		// Number of players configured for this game and group.
		int numOfPlayers = props.getNumOfPlayers(gameNumber, group);

		// Pass 1: instantiate the agents. The chosen strategy representation
		// decides whether the agent is adaptive or basic.
		for (int i = 1; i <= numOfPlayers; ++i)
		{
			AgentRepresentation representation = props.getPlayer(gameNumber, i, group);
			double eGreedy = representation.getEpsilon();
			double competence = representation.getCompetence();

			if (representation.getStrategyType() instanceof ManuallAdaptiveRepresentation)
				players.add(new AdaptiveAgent(i + idOffset, eGreedy, competence));
			else
				players.add(new BasicAgent(i + idOffset, eGreedy, competence));
		}

		// Pass 2: initialize each agent, now that the full players list exists.
		for (int i = 1; i <= numOfPlayers; ++i)
		{
			AgentRepresentation representation = props.getPlayer(gameNumber, i, group);
			// NOTE(review): map lookups return null for unknown names and the
			// null flows straight into init(...) — confirm the engine tolerates it.
			Analyzer_Type analyzerType = analyzerMap.get(representation.getAnalyzerType());
			Teammate_Meodel_Type teammateModelType = teammateModelMap.get(representation.getTeammateModelType());
			StrategyRepresentation strategyRepresentation = representation.getStrategyType();
			DistributionRepresentation distributionRepresentation = representation.getDistributionType();

			// Players list is 0-based while the loop is 1-based.
			Agent agent = players.get(i - 1);
			// Order of the instanceof checks matters: AdaptiveAgent IS-A BasicAgent.
			if (agent instanceof AdaptiveAgent)
			{
				// Additional settings control e.g. whether the agent will stop.
				AdditionalSettings additionalSettings = representation.getAdditionalSettings();
				((AdaptiveAgent) agent).init(analyzerType, strategyRepresentation, teammateModelType,
						distributionRepresentation, players, additionalSettings);
			}
			else if (agent instanceof BasicAgent)
				((BasicAgent) agent).init(analyzerType, strategyRepresentation, teammateModelType,
						distributionRepresentation, players);
		}

		return players;
	}

}
