package org.slusk.thynwor.neat;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.logging.XMLFormatter;

import ojc.util.Misc;

import org.jgap.Chromosome;
import org.slusk.thynwor.corestructures.Environment;
import org.slusk.thynwor.util.RunSetup;
import org.slusk.thynwor.util.Util;

import com.anji.hyperneat.modular.ModularHyperNeatFitnessFunction;
import com.anji.hyperneat.nd.NDActivatorArray;
import com.anji.hyperneat.nd.NDActivatorArray.ActivatorArrayIterator;
import com.anji.hyperneat.onlinereinforcement.OnlineLearningActivatorNDBase;
import com.anji.hyperneat.onlinereinforcement.OnlineLearningNetType;
import com.anji.hyperneat.onlinereinforcement.trainingbank.TrainingBank;
import com.anji.neat.Evolver;
import com.anji.neat.Run;
import com.anji.util.Properties;

public class SingleAgentSensoryArraySubstrateFitnessFunction extends ModularHyperNeatFitnessFunction {
	
	// Detailed run log; logger_resultsOnly carries only the terse end-of-run "#SASAS#" summary lines.
	private static Logger logger = Logger.getLogger(SingleAgentSensoryArraySubstrateFitnessFunction.class.getName());
	private static Logger logger_resultsOnly = Logger.getLogger(SingleAgentSensoryArraySubstrateFitnessFunction.class.getName() + ".ResultsOnly");

	// Property keys read in init() and main().
	private static final String LEARNING_TYPE_KEY = "ann.hyperneat.online.learning.type";
	private static final String NUM_RUNS_KEY = "num.runs";
	private static final String BASE_DIR_KEY = "persistence.base.dir";
	private static final String USE_IMPROVEMENT_FACTOR = "fitness.useImprovementFactor";
	
	private static final long serialVersionUID = -5913552257257746198L;
	
	// Online-learning variant used to wrap the substrate (NONE = use the substrate as-is).
	private static OnlineLearningNetType learningType;

	// Population size and generation count, read from properties in init().
	private int popSize;
	private int numGens;
		
	// Best-so-far trackers; written under 'lock' in evaluate().
	private double fitnessRecord = -40000;
	private double performanceRecord = -1;
	private double foodCollectedRecord = 0;
	private int foodCollectedRecordGen = -1;
	private boolean runSuccessful = false;
	private long bestIndividual; 
	private int subjectCount = 0;	// evaluations completed so far; generation = subjectCount / popSize
	private Object lock;	//for writing to static members

	// Each individual is evaluated over this many simulations of 'timeSteps' steps each.
	private static final int simulationsToRun = 3;
	private static final int timeSteps = 50;
	// Training-bank settings; only read when TrainingBank.useTrainingBank is set (see init()).
	private static int maxNumTrainingEpochs;
	private static float mseThreshold;

	// evaluate() reports fitness scaled to [0, maxFitnessValue].
	private static final int maxFitnessValue = 1000;
	// Legacy fixed metric weights; current shaping uses the *FactorStart/End fields below
	// (improvementFactorWgt is still used when fitness.useImprovementFactor is set).
	private static final double foodCollectedWgt = 0.2;
	private static final double moveFoodTowardGoalWgt = 0.25;
	private static final double moveTowardSignalWgt = 0.5;
	private static final double assistWithFoodWgt = 0.4;
	private static final double attachedToFoodWgt = 0.05;
	private static final double handsFullWgt = 0.05;
	private static final double hitWallWgt = 0.05;
	private static final double improvementFactorWgt = 0.2;
	
	// Target ("goal") counts each raw metric is normalized against in calculateFitness().
	private static final double foodCollectedGoal = ((double)timeSteps) / 5;	// Return 1 food every x steps
	private static final double moveFoodTowardGoalGoal = (double) timeSteps * 6;	// any given turn, one bot is moving toward goal with food
	private static final double moveTowardSignalGoal = (double) timeSteps * 6;
	private static final double assistWithFoodGoal = (double) timeSteps * 6;
	private static final double attachedToFoodGoal = (double) timeSteps * 6;
	private static final double handsFullGoal = (double) timeSteps * 6;
	private static final double hitObstacleGoal = (double) timeSteps * 6;
	private static final double droppedFoodGoal = (double) timeSteps * 6;
	private static final double improvementFactorGoal = 1.77;	// empirically chosen normalizer; TODO revisit
	
	// These values control when to begin evaluating without single agent tow mode.
	// Once foodCollectedRecord reaches the threshold, tow mode stops at the next generation.
	private static final int foodCollectionThreshold = 6;
	private static int genToStopSingleAgentTowMode = Integer.MAX_VALUE;
	
	// Shaping-factor endpoints, loaded from sasasFF.* properties in init(); each
	// factor is linearly interpolated from its Start to its End value over the run.
	private double foodCollectedFactorStart;
	private double foodCollectedFactorEnd;
	private double moveFoodTowardGoalFactorStart;
	private double moveFoodTowardGoalFactorEnd;
	private double moveTowardSignalFactorStart;
	private double moveTowardSignalFactorEnd;
	private double assistWithFoodFactorStart;
	private double assistWithFoodFactorEnd;
	private double attachedToFoodFactorStart;
	private double attachedToFoodFactorEnd;
	private double handsFullFactorStart;
	private double handsFullFactorEnd;
	private double hitObstacleFactorStart;
	private double hitObstacleFactorEnd;
	private double droppedFoodFactorStart;
	private double droppedFoodFactorEnd;
	        
	// Per-generation interpolation slopes, derived in init() as (end - start) / numGens.
	private double foodCollectedSlope;
	private double moveFoodTowardGoalSlope;
	private double moveTowardSignalSlope;
	private double assistWithFoodSlope;
	private double attachedToFoodSlope;
	private double handsFullSlope;
	private double hitObstacleSlope;
	private double droppedFoodSlope;
	
	// When true, learned weights are reset before every simulation (disables the improvement factor).
	private static final boolean resetLearningEachSimulation = false;
	private boolean useImprovementFactor;	// This is of no use if the above is true.
	
	// Log file handlers, created lazily on first init().
	private static FileHandler fileTxt;
	private static FileHandler fileTxt_resultsOnly;
//	private SimpleFormatter formatterTxt;
	private String runName;

	// Per-evaluation statistics, one slot per (generation, individual); indexed by subjectCount.
	private long[] _chromosome;
	private float[] _fitness;
//	private float[] _performance;
	private float[] _foodCollected;
	private float[] _attachedToFood;
	private float[] _assistWithFood;
	private float[] _moveTowardSignal;
	private float[] _moveFoodTowardGoal;
	private float[] _handsFull;
	private float[] _hitObstacle;
	private float[] _droppedFood;
	private float[] _improvementFactor;
	private float[] _improvement1Over0;
	private float[] _improvement2Over1;
	private float[] _improvement2Over0;
	
	private static final float fitnessDivisor = timeSteps * 6;	// 6 = # of agents; idea is that if every agent does this every turn, that is the best/worst possible performance.
		
	static {
		// No static initialization currently required.
	}
	
	// No-arg constructor; all real initialization happens in init(Properties).
	public SingleAgentSensoryArraySubstrateFitnessFunction() {

	}
	
	/**
	 * Reads run configuration from the given properties, derives the
	 * per-generation shaping-factor slopes, resets the statistic buffers, and
	 * (once per JVM) attaches the main and results-only file log handlers.
	 *
	 * @param props run configuration (popul.size, num.generations, sasasFF.*
	 *              shaping endpoints, optional TrainingBank.* settings)
	 */
	public void init(Properties props) {
		super.init(props);
		
		logger.setLevel(Level.INFO);

		learningType = OnlineLearningNetType.valueOf(props.getProperty(LEARNING_TYPE_KEY));
		runName = props.getProperty("run.name");
		popSize = props.getIntProperty("popul.size");
		numGens = props.getIntProperty("num.generations");
		lock = new Object();

		// Shaping-factor endpoints; each factor interpolates linearly from
		// Start to End over the course of the run (see calculateFitness()).
		foodCollectedFactorStart = props.getDoubleProperty("sasasFF.foodCollectedFactorStart");
		foodCollectedFactorEnd = props.getDoubleProperty("sasasFF.foodCollectedFactorEnd");
		moveFoodTowardGoalFactorStart = props.getDoubleProperty("sasasFF.moveFoodTowardGoalFactorStart");
		moveFoodTowardGoalFactorEnd = props.getDoubleProperty("sasasFF.moveFoodTowardGoalFactorEnd");
		moveTowardSignalFactorStart = props.getDoubleProperty("sasasFF.moveTowardSignalFactorStart");
		moveTowardSignalFactorEnd = props.getDoubleProperty("sasasFF.moveTowardSignalFactorEnd");
		assistWithFoodFactorStart = props.getDoubleProperty("sasasFF.assistWithFoodFactorStart");
		assistWithFoodFactorEnd = props.getDoubleProperty("sasasFF.assistWithFoodFactorEnd");
		attachedToFoodFactorStart = props.getDoubleProperty("sasasFF.attachedToFoodFactorStart");
		attachedToFoodFactorEnd = props.getDoubleProperty("sasasFF.attachedToFoodFactorEnd");
		handsFullFactorStart = props.getDoubleProperty("sasasFF.handsFullFactorStart");
		handsFullFactorEnd = props.getDoubleProperty("sasasFF.handsFullFactorEnd");
		hitObstacleFactorStart = props.getDoubleProperty("sasasFF.hitWallFactorStart");
		hitObstacleFactorEnd = props.getDoubleProperty("sasasFF.hitWallFactorEnd");
		droppedFoodFactorStart = props.getDoubleProperty("sasasFF.droppedFoodFactorStart");
		droppedFoodFactorEnd = props.getDoubleProperty("sasasFF.droppedFoodFactorEnd");
		
		useImprovementFactor = props.getBooleanProperty(USE_IMPROVEMENT_FACTOR);
		if (props.getBooleanProperty("TrainingBank.useTrainingBank", false)) {
			maxNumTrainingEpochs = props.getIntProperty("TrainingBank.maxNumTrainingEpochs");
			mseThreshold = props.getFloatProperty("TrainingBank.mseThreshold");
		}
		
		// Per-generation increments so each shape factor reaches its End value
		// at the final generation.
		foodCollectedSlope = (foodCollectedFactorEnd - foodCollectedFactorStart) / numGens;
		moveFoodTowardGoalSlope = (moveFoodTowardGoalFactorEnd - moveFoodTowardGoalFactorStart) / numGens;
		moveTowardSignalSlope = (moveTowardSignalFactorEnd - moveTowardSignalFactorStart) / numGens;
		assistWithFoodSlope = (assistWithFoodFactorEnd - assistWithFoodFactorStart) / numGens;
		attachedToFoodSlope = (attachedToFoodFactorEnd - attachedToFoodFactorStart) / numGens;
		handsFullSlope = (handsFullFactorEnd - handsFullFactorStart) / numGens;
		hitObstacleSlope = (hitObstacleFactorEnd - hitObstacleFactorStart) / numGens;
		droppedFoodSlope = (droppedFoodFactorEnd - droppedFoodFactorStart) / numGens;
		
		initializeStatRecords();
		
		// Attach file handlers only once; fileTxt is static, so repeated init()
		// calls (one per run in main()) reuse the same log files.
		if (null == fileTxt) {
			try {
				// Single timestamp so the paired log files share the same name
				// (previously Util.timeStamp() was called twice and could differ).
				String stamp = Util.timeStamp();

				fileTxt = new FileHandler("./runs/" + runName + "/sasas_" + runName + "_" + stamp + ".log");
				fileTxt.setFormatter(new SimpleFormatter());
				logger.addHandler(fileTxt);
				
				fileTxt_resultsOnly = new FileHandler("./runs/" + runName + "/sasas_" + runName + "_" + stamp + "_resultsOnly.log");
				fileTxt_resultsOnly.setFormatter(new SimpleFormatter());
				logger_resultsOnly.addHandler(fileTxt_resultsOnly);
				
			} catch (SecurityException e) {
				// Keep console logging rather than aborting the run.
				logger.log(Level.SEVERE, "Failed to create log file handlers for run " + runName, e);
			} catch (IOException e) {
				logger.log(Level.SEVERE, "Failed to create log file handlers for run " + runName, e);
			}
		}
	}
	
	/**
	 * Allocates fresh per-evaluation statistic buffers (one slot per individual
	 * per generation) and clears the best-so-far trackers for a new run.
	 */
	private void initializeStatRecords() {
		final int slots = numGens * popSize;

		_chromosome = new long[slots];
		_fitness = new float[slots];
		_foodCollected = new float[slots];
		_attachedToFood = new float[slots];
		_assistWithFood = new float[slots];
		_moveTowardSignal = new float[slots];
		_moveFoodTowardGoal = new float[slots];
		_handsFull = new float[slots];
		_hitObstacle = new float[slots];
		_droppedFood = new float[slots];
		_improvementFactor = new float[slots];
		_improvement1Over0 = new float[slots];
		_improvement2Over1 = new float[slots];
		_improvement2Over0 = new float[slots];

		subjectCount = 0;
		fitnessRecord = -40000;
		performanceRecord = -1;
		foodCollectedRecordGen = -1;
		// NOTE(review): the field initializer uses 0 but this reset uses 1 —
		// confirm which starting record value is intended.
		foodCollectedRecord = 1;
	}
	
	/**
	 * Evaluates one genotype by running {@code simulationsToRun} simulations in
	 * a shared Environment, averaging the per-simulation metrics into a single
	 * fitness score, and recording per-evaluation statistics under 'lock'.
	 *
	 * @param genotype        the individual being evaluated
	 * @param substrate       the decoded substrate network array
	 * @param evalThreadIndex index of the evaluation thread (unused here)
	 * @return fitness scaled to [0, maxFitnessValue]
	 */
	@Override
	protected int evaluate(Chromosome genotype, NDActivatorArray substrate, int evalThreadIndex) {
		ArrayList<Float> fitnessArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> performanceArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> foodCollectedArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> attachedToFoodArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> assistWithFoodArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> moveTowardSignalArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> moveFoodTowardGoalArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> handsFullArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> hitObstacleArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> droppedFoodArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> improvementFactorArray = new ArrayList<Float>(simulationsToRun);

		// NOTE(review): subjectCount is incremented under 'lock' but read here
		// without it; with concurrent evaluation threads 'generation' may be
		// slightly stale. Preserved as-is.
		int generation = subjectCount / popSize;

		// Per-simulation fitness tracking for the pairwise improvement ratios.
		float iterationFitness = 0;
		float lastIterationFitness;
		float firstIterationFitness = 0;
		float improvement1Over0 = 0;
		float improvement2Over1 = 0;
		float improvement2Over0 = 0;

		// Optionally wrap the substrate in online-learning activators.
		NDActivatorArray netArray = learningType == OnlineLearningNetType.NONE ? substrate : Util.buildLearningNets(substrate, learningType);
		Environment environment = new Environment(props);
		environment.setSasasNetArray(netArray);

		// Single-agent tow mode stays on until the population demonstrates
		// sufficient food collection (see genToStopSingleAgentTowMode).
		environment.setSingleAgentTowMode(generation < genToStopSingleAgentTowMode);

		for (int i = 0; i < simulationsToRun; i++) {
			if (learningType != OnlineLearningNetType.NONE && resetLearningEachSimulation) resetLearning(netArray);

			// From the second simulation on, first consolidate whatever the
			// training bank collected during the previous simulation.
			if (i > 0 && environment.getUseTrainingBank()) {
				TrainingBank trainingBank = environment.getTrainingBank();
				trainingBank.trainToMseThreshold(mseThreshold, maxNumTrainingEpochs);
				trainingBank.clear();
			}

			environment.resetLayout();

			environment.runSim(timeSteps);

			lastIterationFitness = iterationFitness;
			iterationFitness = calculateFitness(environment, generation);
			fitnessArray.add(iterationFitness);

			// Performance = fraction of the food-collection goal achieved.
			float iterationPerformance = (float) (environment.getFoodCollected() / foodCollectedGoal);
			performanceArray.add(iterationPerformance);
			foodCollectedArray.add((float) environment.getFoodCollected());
			attachedToFoodArray.add((float) environment.getAttachedToFood());
			assistWithFoodArray.add((float) environment.getAssistWithFood());
			moveTowardSignalArray.add((float) environment.getMoveTowardSignalCount());
			moveFoodTowardGoalArray.add((float) environment.getMoveFoodTowardGoal());
			handsFullArray.add((float) environment.getHandsFull());
			// Fixed: these two lines previously assigned the values to swapped
			// (and otherwise unused) temp variables; the arrays were already
			// matched to the right getters.
			hitObstacleArray.add((float) environment.getHitObstacle());
			droppedFoodArray.add((float) environment.getDroppedFood());

			// Improvement factor is computed over raw performance only.
			improvementFactorArray.add(iterationPerformance);

			// Pairwise improvement ratios, recorded for analysis only (HACK).
			// NOTE(review): a zero denominator yields Infinity/NaN in the stat
			// CSVs; preserved as-is since these ratios never feed back into
			// the fitness value.
			switch (i) {
			case 0:
				firstIterationFitness = iterationFitness;
				break;
			case 1:
				improvement1Over0 = iterationFitness / lastIterationFitness;
				break;
			case 2:
				improvement2Over1 = iterationFitness / lastIterationFitness;
				improvement2Over0 = iterationFitness / firstIterationFitness;
				break;
			}
		}

		// Average each metric over the simulations.
		float fitness = averageResults(fitnessArray);
		float performance = averageResults(performanceArray);
		float foodCollected = averageResults(foodCollectedArray);
		float attachedToFood = averageResults(attachedToFoodArray);
		float assistWithFood = averageResults(assistWithFoodArray);
		float moveTowardSignal = averageResults(moveTowardSignalArray);
		float moveFoodTowardGoal = averageResults(moveFoodTowardGoalArray);
		float handsFull = averageResults(handsFullArray);
		float hitObstacle = averageResults(hitObstacleArray);
		float droppedFood = averageResults(droppedFoodArray);

		float improvementFactor = calculateImprovementFactor(improvementFactorArray);

		// Optionally blend the improvement factor into the fitness (Hackish).
		if (useImprovementFactor) {
			fitness *= 1 - improvementFactorWgt;
			fitness += (improvementFactor / improvementFactorGoal) * improvementFactorWgt;
		}

		genotype.setPerformanceValue(performance);

		synchronized (lock) {

			_chromosome[subjectCount] = genotype.getId();
			_fitness[subjectCount] = fitness;
			_foodCollected[subjectCount] = foodCollected;
			_attachedToFood[subjectCount] = attachedToFood;
			_assistWithFood[subjectCount] = assistWithFood;
			_moveTowardSignal[subjectCount] = moveTowardSignal;
			_moveFoodTowardGoal[subjectCount] = moveFoodTowardGoal;
			_handsFull[subjectCount] = handsFull;
			_hitObstacle[subjectCount] = hitObstacle;
			_droppedFood[subjectCount] = droppedFood;
			_improvementFactor[subjectCount] = improvementFactor;
			_improvement1Over0[subjectCount] = improvement1Over0;
			_improvement2Over1[subjectCount] = improvement2Over1;
			_improvement2Over0[subjectCount] = improvement2Over0;

			if (fitness > fitnessRecord) {
				logger.info("** FIT RECORD: Chrmsm: " + genotype.getId() + ", gen: " + generation + ", evalCount: " + subjectCount + ", perf: " + performance + ", fit: " + fitness + ", foodColl: " + foodCollected);
				fitnessRecord = fitness;
			}
			if (performance > performanceRecord) {
				logger.info("*** PERF RECORD: Chrmsm: " + genotype.getId() + ", gen: " + generation + ", evalCount: " + subjectCount + ", perf: " + performance + ", fit: " + fitness + ", foodColl: " + foodCollected);
				performanceRecord = performance;
			}
			if (foodCollected > foodCollectedRecord) {
				logger.info("** FOOD RECORD: Chrmsm: " + genotype.getId() + ", gen: " + generation + ", evalCount: " + subjectCount + ", perf: " + performance + ", fit: " + fitness + ", foodColl: " + foodCollected);
				foodCollectedRecord = foodCollected;
				foodCollectedRecordGen = generation;
				bestIndividual = genotype.getId();

				// If we've surpassed the food collection threshold and haven't yet
				// scheduled the end of single-agent tow mode, stop it next generation.
				if (foodCollectedRecord >= foodCollectionThreshold && genToStopSingleAgentTowMode > generation)
					genToStopSingleAgentTowMode = generation + 1;
			}

			// The run counts as successful once an individual comes within 4% of the goal.
			if (foodCollected >= foodCollectedGoal * 0.96) {
				runSuccessful = true;
			}

			subjectCount++;
		}

		return (int) Math.round(fitness * maxFitnessValue);
	}

	/**
	 * Resets every activator's weights to their values at the time the learning
	 * activator was created.
	 *
	 * @param substrate array of online-learning activators to reset
	 */
	private void resetLearning(NDActivatorArray substrate) {
		ActivatorArrayIterator it = substrate.iterator();
		while (it.hasNext()) {
			OnlineLearningActivatorNDBase learningNet = (OnlineLearningActivatorNDBase) it.get();
			learningNet.resetLearning();
			it.next();
		}
	}



	/**
	 * Computes a shaped fitness in [0, 1] for one completed simulation.
	 *
	 * Each behavioral metric is normalized against its goal count and weighted
	 * by a shaping factor that interpolates linearly from its configured start
	 * value to its end value over the run; the weighted terms are summed and
	 * divided by the total weight. handsFull, hitWall and droppedFood act as
	 * penalties: the full weight is granted up front and the scaled count is
	 * subtracted.
	 *
	 * @param e          environment holding the metric counters for the simulation just run
	 * @param generation current generation, used to interpolate the shape factors
	 * @return fitness clamped to [0, 1]
	 */
	private float calculateFitness(Environment e, int generation) {
		double foodCollectedShapeFactor = generation * foodCollectedSlope + foodCollectedFactorStart;
		double moveFoodTowardGoalShapeFactor = generation * moveFoodTowardGoalSlope + moveFoodTowardGoalFactorStart;
		double assistWithFoodShapeFactor = generation * assistWithFoodSlope + assistWithFoodFactorStart;
		double attachedToFoodShapeFactor = generation * attachedToFoodSlope + attachedToFoodFactorStart;
		double moveTowardSignalShapeFactor = generation * moveTowardSignalSlope + moveTowardSignalFactorStart;
		double handsFullShapeFactor = generation * handsFullSlope + handsFullFactorStart;
		double hitObstacleShapeFactor = generation * hitObstacleSlope + hitObstacleFactorStart;
		double droppedFoodShapeFactor = generation * droppedFoodSlope + droppedFoodFactorStart;
		
		// Triple the food weight when this simulation ties/beats the record, or
		// when no food was collected at all. (Was the non-short-circuit '|';
		// '||' is equivalent assuming getFoodCollected() is a pure getter.)
		// NOTE(review): foodCollectedRecord is read here without holding 'lock'.
		if (e.getFoodCollected() >= foodCollectedRecord || e.getFoodCollected() == 0) foodCollectedShapeFactor *= 3;

		// Rewards: normalized metric count times shape factor.
		double foodCollected = ((double) e.getFoodCollected()) / foodCollectedGoal * foodCollectedShapeFactor;
		double moveFoodTowardGoal = ((double) e.getMoveFoodTowardGoal()) / moveFoodTowardGoalGoal * moveFoodTowardGoalShapeFactor;
		double assistWithFood = ((double) e.getAssistWithFood()) / assistWithFoodGoal * assistWithFoodShapeFactor;
		double attachedToFood = ((double) e.getAttachedToFood()) / attachedToFoodGoal * attachedToFoodShapeFactor;
		double moveTowardSignal = ((double) e.getMoveTowardSignalCount()) / moveTowardSignalGoal * moveTowardSignalShapeFactor;
		
		// Penalties
		double handsFull = handsFullShapeFactor - ((double) e.getHandsFull()) / handsFullGoal * handsFullShapeFactor;
		double hitWall = hitObstacleShapeFactor - ((double) e.getHitObstacle()) / hitObstacleGoal * hitObstacleShapeFactor;
		double droppedFood = droppedFoodShapeFactor - ((double) e.getDroppedFood()) / droppedFoodGoal * droppedFoodShapeFactor;

		double fitness = foodCollected
				+ moveFoodTowardGoal
				+ assistWithFood
				+ attachedToFood
				+ handsFull
				+ hitWall
				+ moveTowardSignal
				+ droppedFood;
		
		// Normalize by the total weight so the result is nominally in [0, 1].
		fitness /= foodCollectedShapeFactor 
				+ moveFoodTowardGoalShapeFactor 
				+ assistWithFoodShapeFactor 
				+ attachedToFoodShapeFactor
				+ moveTowardSignalShapeFactor 
				+ hitObstacleShapeFactor
				+ handsFullShapeFactor
				+ droppedFoodShapeFactor;
		
		// Clamp to [0, 1].
		return (float) Math.max(0, Math.min(1, fitness));
	}

	/**
	 * Computes a weighted "did performance improve across repeated simulations"
	 * score. Later simulations receive progressively larger weights (1.0, 1.1,
	 * 1.2, ...), so improvement relative to the first simulation raises the
	 * score. The +1 offsets keep the final ratio defined when the first
	 * performance value is 0.
	 *
	 * @param performanceArray per-simulation performance values, in simulation order
	 * @return non-negative improvement factor; 0 for an empty list
	 */
	protected float calculateImprovementFactor(ArrayList<Float> performanceArray) {
		// Guard: the original threw IndexOutOfBoundsException (after a 0/0
		// division) on an empty list.
		if (performanceArray.isEmpty()) return 0;

		float wgt = 1.0f;
		float wgtTotal = 0;
		float improvementFactor = 0;
		
		for (int i = 0; i < performanceArray.size(); i++) {
			improvementFactor += (performanceArray.get(i) + 1) * wgt;
			wgtTotal += wgt;
			wgt += 0.1f;
		}
		
		improvementFactor /= wgtTotal;
		// Relative improvement of the weighted mean over the first simulation.
		improvementFactor = (improvementFactor - performanceArray.get(0) + 1) / (performanceArray.get(0) + 1);
		
		// Clamp below at 0; the upper clamp is intentionally disabled.
		return improvementFactor < 0 ? 0 : improvementFactor;
	}
	
	/**
	 * Returns the arithmetic mean of the given values.
	 *
	 * @param fitnessArray values to average; must not be null
	 * @return the mean as a float, or 0 for an empty list (the original
	 *         produced NaN via a 0/0.0 division)
	 */
	public static float averageResults(ArrayList<Float> fitnessArray) {
		if (fitnessArray.isEmpty()) return 0;
		double sum = 0;
		for (Float value : fitnessArray) {
			sum += value;
		}
		return (float) (sum / fitnessArray.size());
	}

	
	/**
	 * @return the scale used to convert the [0, 1] fitness into the integer
	 *         value returned by evaluate()
	 */
	public int getMaxFitnessValue() {
		return maxFitnessValue;
	}
	
	/**
	 * Entry point: executes num.runs independent evolutionary runs, dumping
	 * per-run CSV statistics and a one-line "#SASAS#" summary per run to the
	 * results-only log.
	 *
	 * @param args names of property resources to load; at least one required
	 */
	public static void main( String[] args ) throws Throwable {
		if ( args.length < 1 ) {
			System.exit( -1 );
		}
		
		SingleAgentSensoryArraySubstrateFitnessFunction ff;

		Properties props = new Properties();
		props.loadFromResources(args);
		
		String runName = props.getProperty("run.name");
		String runID = runName + "-" + System.currentTimeMillis();
		String outputDir = props.getProperty("fitness_function.class") + File.separatorChar + runID + File.separatorChar;
		props.setProperty("run.id", ""+runID);
		props.setProperty("output.dir", outputDir);
		int numRuns = props.getIntProperty(NUM_RUNS_KEY);
		String baseDir = props.getProperty(BASE_DIR_KEY);
		double avgRunTime = 0;	// exponentially weighted moving average, in seconds
		
		RunSetup.setup(runName, numRuns);
		
		for (int run = 1; run <= numRuns; run++) {
			long startRun = System.currentTimeMillis();
			
			// Reload the properties each run so every run starts from a clean
			// configuration, then point persistence at a per-run directory.
			// NOTE(review): run.id and output.dir are shared across all runs —
			// confirm that is intended.
			props = new Properties();
			props.loadFromResources(args);
			
			props.put(BASE_DIR_KEY, baseDir + "_" + run);
			props.setProperty("run.id", ""+runID);
			props.setProperty("output.dir", outputDir);
			
			Evolver evolver = new Evolver();
			evolver.init( props );
			ff = (SingleAgentSensoryArraySubstrateFitnessFunction) evolver.getFitnessFunction();
			// Reset the stat buffers. (The original guarded this with the
			// always-true condition "run > 0"; calling unconditionally is
			// behaviorally identical.)
			ff.initializeStatRecords();
			
			logger.info("\n\n--- START RUN: " + run + " of " + numRuns + " (" + ((run*100)/(numRuns)) + "%) ---------------------------------------\n\n");
			
			evolver.run();
			
			int popSize = ff.popSize;
			float[] _fitness = ff._fitness;
			long[] _chromosome = ff._chromosome;
			float[] _foodCollected = ff._foodCollected;
			float[] _attachedToFood = ff._attachedToFood;
			float[] _assistWithFood = ff._assistWithFood;
			float[] _moveTowardSignal = ff._moveTowardSignal;
			float[] _moveFoodTowardGoal = ff._moveFoodTowardGoal;
			float[] _handsFull = ff._handsFull;
			float[] _hitObstacle = ff._hitObstacle;
			float[] _droppedFood = ff._droppedFood;
			float[] _improvementFactor = ff._improvementFactor;
			float[] _improvement1Over0 = ff._improvement1Over0;
			float[] _improvement2Over1 = ff._improvement2Over1;
			float[] _improvement2Over0 = ff._improvement2Over0;
			
			// Per-metric CSV dumps, one row per evaluated individual.
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/fitnessAll.csv", _fitness, _chromosome, popSize, "Fitness" );
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/foodCollectedAll.csv", _foodCollected, _chromosome, popSize, "Average Food Collected");
			
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/AttachedToFoodAll.csv", _attachedToFood, _chromosome, popSize, "AttachedToFood" );
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/AssistWithFoodAll.csv", _assistWithFood, _chromosome, popSize, "AssistWithFood");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/MoveTowardSignalAll.csv", _moveTowardSignal, _chromosome, popSize, "MoveTowardSignal");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/MoveFoodTowardGoalAll.csv", _moveFoodTowardGoal, _chromosome, popSize, "MoveFoodTowardGoal");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/HandsFullAll.csv", _handsFull, _chromosome, popSize, "HandsFull");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/HitObstacleAll.csv", _hitObstacle, _chromosome, popSize, "HitObstacle");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/DroppedFoodAll.csv", _droppedFood, _chromosome, popSize, "DroppedFood");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/improvementFactorAll.csv", _improvementFactor, _chromosome, popSize, "ImprovementFactor");
			
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/improvement1Over0.csv", _improvement1Over0, _chromosome, popSize, "improvement1Over0");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/improvement2Over1.csv", _improvement2Over1, _chromosome, popSize, "improvement2Over1");
			Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/improvement2Over0.csv", _improvement2Over0, _chromosome, popSize, "improvement2Over0");
			
			// Combined per-generation aggregate dump.
			String[] resultsNames = new String[] {
					"Fitness"
					, "FoodCollected"
					, "AttachedToFood"
					, "AssistWithFood"
					, "MoveTowardSignal"
					, "MoveFoodTowardGoal"
					, "HandsFull"
					, "HitObstacle"
					, "DroppedFood"
					, "ImprovementFactor"
					, "improvement1Over0"
					, "improvement2Over1"
					, "improvement2Over0"
			};
			
			float[][] results = new float[][] {
					_fitness
					,_foodCollected
					,_attachedToFood
					,_assistWithFood
					,_moveTowardSignal
					,_moveFoodTowardGoal
					,_handsFull
					,_hitObstacle
					,_droppedFood
					,_improvementFactor
					,_improvement1Over0
					,_improvement2Over1
					,_improvement2Over0
			};
			Util.writeAllAggregateResultsToFile("./runs/"+runName+"_"+run+"/allAggregate.csv", popSize, resultsNames, results);
			
			evolver.dispose();
			
			long duration = (System.currentTimeMillis() - startRun) / 1000;
			if (avgRunTime == 0)
				avgRunTime = duration;
			else
				avgRunTime = avgRunTime * 0.9 + duration * 0.1;
			// Fixed off-by-one: after finishing run 'run', (numRuns - run) runs remain.
			int eta = (int) Math.round(avgRunTime * (numRuns - run));
			logger.info("\n--- Run " + run + " finished in " + Misc.formatTimeInterval(duration) +".  ETA to complete all runs:" + Misc.formatTimeInterval(eta) + ". ------------------\n");
			
			logger_resultsOnly.info("#SASAS# Run=" + run + ",Successful=" + ff.runSuccessful + ",Best_entity=" + ff.bestIndividual + ",food_collected=" + ff.foodCollectedRecord + ",gen=" + ff.foodCollectedRecordGen + "");
		}
		
		System.exit( 0 );
	}
	

	/**
	 * Delegates end-of-run detection to the base class.
	 *
	 * @return true when the evolutionary run should terminate
	 */
	@Override
	public boolean endRun() {
		return super.endRun();
	}

	/**
	 * Releases base-class resources; this subclass holds nothing extra to free.
	 */
	@Override
	public void dispose() {
		super.dispose();
	}

	@Override
	protected void scale(int scaleCount, int scaleFactor) {
		// Intentionally a no-op: this fitness function does not use substrate scaling.
	}	
}

