import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;

import agentFramework.agent.Agent;
import agentFramework.core.Core;
import agentFramework.core.env.Environment;
import agentFramework.strategy.EpsQLStrategy;
import agentFramework.strategy.QLStrategy;
import agentFramework.strategy.Strategy;
import agentFramework.tools.InfluenceGroup;
import agentFramework.tools.LeaderDetector;
import agentFramework.utils.Pair;


public class DilemmaTester {

	// run a customized random test 
		public static void runTest(int testId, int nAgents, int nBully, int nQL, int nEpsQL, int nEdges, int nIterations, int nEpochs,
								   int PHI, int PI, double SIGMA, double RHO)
		{
			int[] 					type = new int[nAgents];
			boolean[][]				neighbor = new boolean[nAgents][nAgents];
			Environment				env = new DilemmaEnvironment("");
			Core					core = new DilemmaCore(env, nIterations, nEpochs);
			long					time = System.currentTimeMillis();
			long					edges = nEdges;

			System.out.println("Starting test " + testId);
			
			// set the core in the environment
			env.addCore(core);

			// add the nAgent profiles
			for (int i = 0; i < nAgents; i++)
			{
				if (i < nBully)
				{
					type[i] = Constants.TYPE_BULLY;
				}
				else if ( (i >= nBully) && (i < nBully + nQL) )
				{
					type[i] = Constants.TYPE_QL;
				}
				else if ( (i >= nBully + nQL) && (i < nBully + nQL + nEpsQL) )
				{
					type[i] = Constants.TYPE_EPS_QL;
				}
			}
			
			// shuffle the agent profiles
			for (int k = 0; k < 3*nAgents; k++)
			{
				Random 		r = new Random();
				int 		i = r.nextInt(nAgents);
				int 		j = r.nextInt(nAgents);
				int 		aux;
				
				aux = type[i];
				type[i] = type[j];
				type[j] = aux;
			}
			
			// now create the agents
			for (int i = 0; i < nAgents; i++)
			{
				Random 			r = new Random();
				Agent 			a = new DilemmaGuy(i, Core.id, new DilemmaGuyPersonality(r.nextInt(4), r.nextDouble()), env);
				Strategy		strategy = null;
				
				switch (type[i])
				{
					case Constants.TYPE_BULLY:
					{
						strategy = new DilemmaBullyStrategy(env, a);
						break;
					}
					
					case Constants.TYPE_QL:
					{
						strategy = new QLStrategy(env, a, Constants.QL_ALPHA, Constants.QL_DEFAULT_PENALTY);
						break;
					}
					
					case Constants.TYPE_EPS_QL:
					{
						strategy = new EpsQLStrategy(env, a, Constants.QL_EPSILON, Constants.QL_ALPHA, Constants.QL_DEFAULT_PENALTY);
						break;
					}
					
					default: break;
				}

				a.setStrategy(strategy);
				env.addAgent(a);
			}
			
			// init the neighbor matrix
			for (int i = 0; i < nAgents; i++)
				for (int j = 0; j < nAgents; j++)
				{
					neighbor[i][j] = false;
				}
			
			// add the edges
			while (nEdges > 0)
			{
				ArrayList< Pair<Integer, Integer> >  list = new ArrayList< Pair<Integer, Integer> >();
				Random 	r = new Random();
				Pair<Integer, Integer>	p;
				
				// compute the unconnected pair of agents
				for (int i = 0; i < nAgents - 1 ; i++)
					for (int j = i + 1; j < nAgents; j++)
					{
						if (!neighbor[i][j])
						{
							list.add(new Pair<Integer, Integer>(i, j));
						}
					}
				
				p = list.remove(r.nextInt(list.size()));
				neighbor[p.first][p.second] = neighbor[p.second][p.first] = true;
				
				nEdges--;
			}
			
			env.addNeighborMatrix(neighbor);
			
			LeaderDetector ld = new LeaderDetector(nAgents, neighbor, PHI, PI, SIGMA, RHO);
			((DilemmaCore) core).setLeaderDetector(ld);
			
			System.out.println("Starting simulation!");
			
			// start the simulation
			core.start();
			
			// wait the end of the simulation
			try
			{
				core.join();
			}
			catch(Exception e) { e.printStackTrace(); }
			
			System.out.println("Simulation ended!");
			
			System.out.println("elapsed time: " + 1.0 * (System.currentTimeMillis() - time) / 1000 + " sec");
		
			System.out.println("\n### Statistics:");
			System.out.println("---------------");
			
			System.out.println("agents=" + nAgents + "  edges=" + edges + "  bully=" + nBully + "  QL=" + nQL + "  Eps_QL=" + nEpsQL);
			
			System.out.println(printStatistics((DilemmaCore)core));
		}
		
		/**
		 * Runs leader detection on the finished simulation and formats a
		 * per-group and per-profile (Bully / QL / Eps_QL) report: leader
		 * counts, adept-count min/max/mean, score min/max/mean, plus the best
		 * score over all group roots.
		 *
		 * @param core the core whose leaderDetector and environment are read
		 * @return the formatted report; degrades gracefully (profile "none",
		 *         zero means) when the group list is empty or a profile has
		 *         no leaders
		 */
		public static String printStatistics(DilemmaCore core)
		{	
			StringBuilder	st = new StringBuilder();	// was O(n^2) String concatenation in a loop
			core.leaderDetector.findLeaders();
			
			ArrayList<InfluenceGroup> groupList = core.leaderDetector.getInfluenceGroups();
			Environment env = core.environment;
			int[]			leaders = new int[3];
			String[]		profile = {"Bully", "QL", "Eps_QL"};
			int[]			count = new int[3];
			double[]        meanScore = new double[3];
			double[]		minScore = new double[3];
			double[]		maxScore = new double[3];
			double			bestScore = -Double.MAX_VALUE;
			int				bestScoreId = -1;
			double[]		meanAdepts = new double[3];
			double[]		minAdepts = new double[3];
			double[]		maxAdepts = new double[3];
			
			// init the per-profile accumulators with min/max sentinels
			for (int i = 0; i < 3; i++)
			{
				minScore[i] = Double.MAX_VALUE;
				maxScore[i] = -Double.MAX_VALUE;
				maxAdepts[i] = -1;
				minAdepts[i] = env.getAgentCount() * 100.0;	// larger than any possible adept count
				meanScore[i] = meanAdepts[i] = 0;
			}
			
			for (InfluenceGroup group : groupList)
			{
				Agent	a = env.getAgentById(group.root);
				String	type = null;
				int		idx = 0;	// unknown strategies fall into the Bully bucket, as before
				
				// NOTE(review): most specific class is tested first — if
				// EpsQLStrategy extends QLStrategy, the old QL-first ordering
				// misclassified every eps-QL leader as plain QL. Confirm the
				// strategy class hierarchy.
				if (a.agentStrategy instanceof EpsQLStrategy)
				{
					type = "Eps_QL_Agent";
					idx = 2;
				}
				else if (a.agentStrategy instanceof QLStrategy)
				{
					type = "QL_Agent";
					idx = 1;
				}
				else if (a.agentStrategy instanceof DilemmaBullyStrategy)
				{
					type = "Bully";
					idx = 0;
				}
				
				st.append("agent=").append(a.name)
				  .append("  type=").append(type)
				  .append("  isLeader=").append(group.hasLeader)
				  .append("  adepts=").append(group.adeptList.size())
				  .append("  score=").append(a.getEarnings())
				  .append("\n");
				
				if (group.hasLeader)
				{
					leaders[idx]++;
					meanScore[idx] += a.getEarnings();
					
					minScore[idx] = Math.min(minScore[idx], a.getEarnings());
					maxScore[idx] = Math.max(maxScore[idx], a.getEarnings());
					
					meanAdepts[idx] += group.adeptList.size();
					minAdepts[idx] = Math.min(minAdepts[idx], group.adeptList.size());
					maxAdepts[idx] = Math.max(maxAdepts[idx], group.adeptList.size());
				}
				
				if ( bestScore < a.getEarnings() )
				{
					bestScore = a.getEarnings();
					bestScoreId = idx;
				}
				
				count[idx]++;
			}
			
			st.append("bestScoreOfAll=").append(bestScore).append("\n");
			// guard: with an empty group list bestScoreId stays -1 and the old
			// code threw ArrayIndexOutOfBoundsException on profile[-1]
			st.append("bestScoreProfile=").append(bestScoreId >= 0 ? profile[bestScoreId] : "none").append("\n");
			
			for (int i = 0; i < 3; i++)
			{
				// avoid 0/0 = NaN in the report for profiles with no leaders
				double	mAdepts = (leaders[i] > 0) ? meanAdepts[i] / leaders[i] : 0.0;
				double	mScore  = (leaders[i] > 0) ? meanScore[i] / leaders[i] : 0.0;
				
				st.append("\nprofile=").append(profile[i])
				  .append("\n    leaders=").append(leaders[i])
				  .append("\n    minAdepts=").append(minAdepts[i])
				  .append("\n    maxAdepts=").append(maxAdepts[i])
				  .append("\n    meanAdepts=").append(mAdepts)
				  .append("\n    minScore=").append(minScore[i])
				  .append("\n    maxScore=").append(maxScore[i])
				  .append("\n    meanScore=").append(mScore)
				  .append("\n");
			}
			
			st.append("\n\n");
			return st.toString();
		}
		// runs a PRE-BUILT test case: env and core are already configured by
		// the caller (the old comment, copy-pasted from the random overload,
		// said "random test"). If step is true the caller drives the
		// simulation itself and no statistics are printed here.
		public static void runTest(Environment env, DilemmaCore core, String fileName, boolean step)
		{
			long					time = System.currentTimeMillis();
			int 					nAgents = env.getAgentCount();
			int 					edges = 0;
			int 					nBully = 0;
			int 					nQL = 0;
			int 					nEpsQL = 0;

			System.out.println("Starting test: " + fileName);
			System.out.println("Starting simulation!");
			
			// start the simulation (the core runs in its own thread)
			core.start();
			
			if (!step)
			{
				// wait for the end of the simulation
				try
				{
					core.join();
				}
				catch(Exception e) { e.printStackTrace(); }
				
				System.out.println("Simulation ended!");
				
				System.out.println("elapsed time: " + 1.0 * (System.currentTimeMillis() - time) / 1000 + " sec");
			
				// count edges (each undirected edge is seen from both endpoints,
				// hence the /2 below) and the population of each profile
				ArrayList<Agent> agentList = env.getAgentList();
				for (Agent a : agentList)
				{
					edges += env.getNeighbors(a).size();
					
					// NOTE(review): most specific class first — if EpsQLStrategy
					// extends QLStrategy, the old QL-first ordering counted every
					// eps-QL agent as plain QL. Confirm the class hierarchy.
					if (a.agentStrategy instanceof EpsQLStrategy)
					{
						nEpsQL++;
					}
					else if (a.agentStrategy instanceof QLStrategy)
					{
						nQL++;
					}
					else if (a.agentStrategy instanceof DilemmaBullyStrategy) 
					{
						nBully++;
					}
				}
				edges /= 2;
				
				
				System.out.println("\n### Statistics:");
				System.out.println("---------------");
				
				System.out.println("agents=" + nAgents + "  edges=" + edges + "  bully=" + nBully + "  QL=" + nQL + "  Eps_QL=" + nEpsQL);
				
				// core is already typed DilemmaCore — the old cast was redundant
				System.out.println(DilemmaTester.printStatistics(core));
			}
		}
		
		/**
		 * Generates a customized random test-case file.
		 *
		 * File layout: agent count; cheap/expensive costs; one
		 * "type joy moneyImportance" line per agent (profiles shuffled);
		 * nEdges, nIterations, nEpochs, PHI, PI, SIGMA, RHO each on its own
		 * line; then one "i j" line per random edge (upper triangle only).
		 * IOExceptions are reported to stderr and swallowed, as before.
		 */
		public static void genTest(String fileName, int nAgents, int nBully, int nQL,  int nEpsQL, int cheapCost, int expensiveCost, int maxJoy,
									int joy_generation, int money_imp_generation,
									int nEdges, int nIterations, int nEpochs,
								   int PHI, int PI, double SIGMA, double RHO)
		{
			int[] 					type = new int[nAgents];
			boolean[][]				neighbor = new boolean[nAgents][nAgents];	// Java boolean arrays default to false
			StringBuilder			buffer = new StringBuilder();	// was O(n^2) String += in a loop
			Random					r = new Random();	// one RNG (was re-created in every loop iteration)

			// try-with-resources: the old code leaked the writer whenever an
			// exception was thrown before bw.close()
			try (BufferedWriter bw = new BufferedWriter(new FileWriter(fileName)))
			{
				buffer.append(nAgents).append("\n");
				buffer.append(cheapCost).append(" ").append(expensiveCost).append("\n");
		
				// assign the agent profiles: bullies first, then QL, then eps-QL
				for (int i = 0; i < nAgents; i++)
				{
					if (i < nBully)
					{
						type[i] = Constants.TYPE_BULLY;
					}
					else if (i < nBully + nQL)
					{
						type[i] = Constants.TYPE_QL;
					}
					else if (i < nBully + nQL + nEpsQL)
					{
						type[i] = Constants.TYPE_EPS_QL;
					}
				}
				
				// shuffle the profiles with an unbiased Fisher-Yates pass
				// (the old 3n random-swap loop produced a biased permutation)
				for (int i = nAgents - 1; i > 0; i--)
				{
					int 	j = r.nextInt(i + 1);
					int 	aux = type[i];
					
					type[i] = type[j];
					type[j] = aux;
				}
				
				// one line per agent: "type joy moneyImportance"
				for (int i = 0; i < nAgents; i++)
				{
					buffer.append(type[i]).append(" ");
					switch (joy_generation)
					{
					case Main.MAX_JOY:
						buffer.append(maxJoy).append(" ");
						break;
					case Main.MIN_JOY:
						buffer.append("1 ");
						break;
					case Main.RANDOM_JOY:
						// NOTE(review): nextInt(maxJoy) yields 0..maxJoy-1 while
						// MIN_JOY writes 1 — confirm whether joy 0 is legal
						buffer.append(r.nextInt(maxJoy)).append(" ");
						break;	// explicit break (was missing on this last case)
					}
					
					switch (money_imp_generation)
					{
					case Main.MAX_MONEY_IMP:
						buffer.append("1.0\n");
						break;
					case Main.MIN_MONEY_IMP:
						buffer.append("0.01\n");
						break;
					case Main.RANDOM_MONEY_IMP:
						buffer.append(r.nextDouble()).append("\n");
						break;
					}
				}
				
				// simulation parameters, one per line
				buffer.append(nEdges).append("\n")
					  .append(nIterations).append("\n")
					  .append(nEpochs).append("\n")
					  .append(PHI).append("\n")
					  .append(PI).append("\n")
					  .append(SIGMA).append("\n")
					  .append(RHO).append("\n");
				
				// build the list of all candidate edges ONCE — the old code
				// rebuilt it for every single edge: O(nEdges * nAgents^2)
				ArrayList< Pair<Integer, Integer> >  list = new ArrayList< Pair<Integer, Integer> >();
				for (int i = 0; i < nAgents - 1; i++)
					for (int j = i + 1; j < nAgents; j++)
					{
						list.add(new Pair<Integer, Integer>(i, j));
					}
				
				// draw nEdges distinct edges; stop early once the graph is
				// complete (the old code crashed with nextInt(0) in that case)
				while (nEdges > 0 && !list.isEmpty())
				{
					Pair<Integer, Integer>	p = list.remove(r.nextInt(list.size()));
					
					neighbor[p.first][p.second] = neighbor[p.second][p.first] = true;
					nEdges--;
				}
				
				// emit every chosen edge once, smaller index first
				for (int i = 0; i < nAgents - 1; i++)
					for (int j = i + 1; j < nAgents; j++)
					{
						if (!neighbor[i][j]) continue;
						
						buffer.append(i).append(" ").append(j).append("\n");
					}
				
				bw.write(buffer.toString());
			}
			catch(IOException e) {e.printStackTrace();}
		}

		
}
