package org.slusk.thynwor.neat;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

import javax.xml.bind.JAXBException;

import ojc.util.Misc;

import org.jgap.Chromosome;
import org.slusk.thynwor.corestructures.Environment;
import org.slusk.thynwor.util.RunSetup;
import org.slusk.thynwor.util.Util;

import com.anji.hyperneat.modular.ModularHnTugFf;
import com.anji.hyperneat.modular.ModularHyperNeatFitnessFunction;
import com.anji.hyperneat.nd.NDActivatorArray;
import com.anji.hyperneat.nd.NDActivatorArray.ActivatorArrayIterator;
import com.anji.hyperneat.onlinereinforcement.OnlineLearningActivatorNDBase;
import com.anji.hyperneat.onlinereinforcement.OnlineLearningNetType;
import com.anji.neat.Evolver;
import com.anji.tug.TugManager;
import com.anji.util.Properties;

/**
 * TUG-based fitness function for the SASAS cooperative food-foraging task.
 * <p>
 * Each chromosome's substrate is run through {@code simulationsToRun}
 * simulations of {@code timeSteps} steps; the per-simulation behaviour
 * counters (food collected, obstacles hit, etc.) are averaged and combined
 * into a scalar fitness in [0, maxFitnessValue], and also reported as named
 * TUG objective metrics. The {@link #main(String[])} entry point drives a
 * batch of evolutionary runs and dumps per-metric CSV result files.
 */
public class SasasTugFF extends ModularHnTugFf {
	// Property-file keys.
	private static final String LEARNING_TYPE = "ann.hyperneat.online.learning.type";
	private static final String NUM_RUNS_KEY = "num.runs";
	private static final String BASE_DIR_KEY = "persistence.base.dir";
	
	private static final String TUG_OBJECTIVES_XML = "fitness.tug.objectives.xml";
	
	// Names of the TUG objective metrics reported per individual.
	protected static final String ATTACHEDTOFOOD_KEY = "attachedToFood";
	protected static final String MOVEDTOWARDSIGNAL_KEY = "movedTowardSignal";
	protected static final String MOVEFOODTOWARDGOAL_KEY = "moveFoodTowardGoal";
	protected static final String ASSISTWITHFOOD_KEY = "assistWithFood";
	protected static final String FOODCOLLECTED_KEY = "foodCollected";
	protected static final String HANDSFULL_KEY = "handsFull";
	protected static final String HITOBSTACLE_KEY = "hitObstacle";
	protected static final String DROPPEDFOOD_KEY = "droppedFood";
	protected static final String IMPROVEMENTFACTOR_KEY = "improvementFactor";
	
	private static final String USE_IMPROVEMENT_FACTOR = "fitness.useImprovementFactor";
	
	private static final long serialVersionUID = -5913552257257746198L;
	private OnlineLearningNetType learningType;

	private Properties props;
	private int popSize;
	private int numGens;
	// Best-so-far records; read and written only while holding staticLock.
	private double fitnessRecord = -40000;
	private double performanceRecord = -1;
	private double foodCollectedRecord = 1;
	// Number of individuals evaluated so far; also the next free slot in the
	// stat arrays below. Guarded by staticLock.
	private int subjectCount = 0;
	// Guards subjectCount, the record fields, and the per-evaluation stat
	// arrays (evaluate() may be called from multiple eval threads).
	private Object staticLock;
	private static final int simulationsToRun = 3;
	private static final int timeSteps = 50;
	private static final int maxFitnessValue = 1000;
	// Target collection rate used to scale performance: one food item
	// returned every 5 time steps.
	private static final double foodCollectedGoal = ((double) timeSteps) / 5;
	private static final boolean resetLearningEachSimulation = false;
	private boolean useImprovementFactor;	// Ignored if resetLearningEachSimulation is true.
	
	// Once this many food items are collected in a single evaluation,
	// single-agent tow mode is disabled from the following generation on.
	private static final int foodCollectionThreshold = 6;
	private static int genToStopSingleAgentTowMode = Integer.MAX_VALUE;
	
	private static Logger logger;
	private FileHandler fileTxt;
	private SimpleFormatter formatterTxt;
	private String runName;
	// Per-evaluation statistics, one slot per individual per generation,
	// indexed by subjectCount. Guarded by staticLock.
	private long[] _chromosome;
	private float[] _fitness;
	private float[] _performance;
	private float[] _foodCollected;
	private float[] _attachedToFood;
	private float[] _assistWithFood;
	private float[] _moveTowardSignal;
	private float[] _moveFoodTowardGoal;
	private float[] _handsFull;
	private float[] _hitObstacle;
	private float[] _droppedFood;
	private float[] _improvementFactor;
	
	// 6 = # of agents; if every agent does a thing every turn, that is the
	// best/worst possible count for that behaviour, so counts scaled by this
	// divisor fall in [0, 1].
	private static final float fitnessDivisor = timeSteps * 6;

	/** No-arg constructor; configuration happens in {@link #init(Properties)}. */
	public SasasTugFF() {
	}
	
	/**
	 * Reads run configuration, sets up the per-run log file, allocates the
	 * per-evaluation stat arrays and creates the TUG manager.
	 */
	@Override
	public void init(Properties props) {
		super.init(props);
		this.props = props;
		this.popSize = props.getIntProperty("popul.size");
		this.numGens = props.getIntProperty("num.generations");
		this.learningType = OnlineLearningNetType.valueOf(props.getProperty(LEARNING_TYPE));
		this.runName = props.getProperty("run.name");
		
		useImprovementFactor = props.getBooleanProperty(USE_IMPROVEMENT_FACTOR);
		
		logger = Logger.getLogger(SasasTugFF.class.getName());
		logger.setLevel(Level.INFO);
		
		// Attach the file handler only once per instance, even if init() is
		// called again.
		if (null == fileTxt) {
			try {
				fileTxt = new FileHandler("./runs/" + runName + "/sasas_" + runName + "_" + Util.timeStamp() + ".log");
				formatterTxt = new SimpleFormatter();
				fileTxt.setFormatter(formatterTxt);
				logger.addHandler(fileTxt);
			} catch (SecurityException e) {
				e.printStackTrace();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		
		staticLock = new Object();	
		
		initializeStatRecords();
		initializeTugManager(props);
	}

	/**
	 * Creates the TUG manager from the objectives XML named by the
	 * {@code fitness.tug.objectives.xml} property.
	 */
	@Override
	public void initializeTugManager(Properties props) {
		try {
			tugManager = TugManager.createTugManagerFromObjectivesXml(props.getProperty(TUG_OBJECTIVES_XML));
			tugManager.setLogger(logger);
		} catch (JAXBException e) {
			// Log with the cause attached rather than swallowing the stack trace.
			logger.log(Level.SEVERE,
					"Could not load objectives from xml: " + String.valueOf(props.getProperty(TUG_OBJECTIVES_XML)), e);
		}		
	}
	
	/** Allocates one stat slot per individual per generation. */
	private void initializeStatRecords() {
		int size = numGens * popSize;
		_chromosome = new long[size];
		_fitness = new float[size];
		_performance = new float[size];
		_foodCollected = new float[size];
		_attachedToFood = new float[size];
		_assistWithFood = new float[size];
		_moveTowardSignal = new float[size];
		_moveFoodTowardGoal = new float[size];
		_handsFull = new float[size];
		_hitObstacle = new float[size];
		_droppedFood = new float[size];
		_improvementFactor = new float[size];
	}

	/**
	 * Evaluates one individual: runs {@code simulationsToRun} simulations,
	 * averages the behaviour counters, records stats and TUG metrics, and
	 * returns the scaled integer fitness.
	 *
	 * @param genotype        the chromosome being evaluated
	 * @param substrate       the decoded network substrate
	 * @param evalThreadIndex index of the evaluation thread (unused here)
	 * @return fitness in [0, {@code maxFitnessValue}]
	 */
	@Override
	protected int evaluate(Chromosome genotype, NDActivatorArray substrate,
			int evalThreadIndex) {
		ArrayList<Float> performanceArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> foodCollectedArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> attachedToFoodArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> assistWithFoodArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> moveTowardSignalArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> moveFoodTowardGoalArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> handsFullArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> hitObstacleArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> droppedFoodArray = new ArrayList<Float>(simulationsToRun);
		ArrayList<Float> improvementFactorArray = new ArrayList<Float>(simulationsToRun);
		
		float nonTugFitness = 0.0f, performance = 0.0f, foodCollected;
		float attachedToFood;
		float assistWithFood;
		float moveTowardSignal;
		float moveFoodTowardGoal;
		float handsFull;
		float hitObstacle;
		float droppedFood;
		float improvementFactor;
		
		float temp_nonTugFitness;
		float temp_performance;
		float temp_foodCollected;
		float temp_attachedToFood;
		float temp_assistWithFood;
		float temp_moveTowardSignal;
		float temp_moveFoodTowardGoal;
		float temp_handsFull;
		float temp_hitObstacle;
		float temp_droppedFood;
		
		// NOTE(review): subjectCount is read here without the lock; the value
		// may be slightly stale under concurrent evaluation, which only
		// affects the generation estimate used for tow-mode selection.
		int generation = subjectCount / popSize;
		
		// Wrap the substrate in online-learning nets unless learning is disabled.
		NDActivatorArray netArray = learningType == OnlineLearningNetType.NONE ? substrate : Util.buildLearningNets(substrate, learningType);
		Environment environment = new Environment(props);
		environment.setSasasNetArray(netArray);
		
		// Early generations use single-agent tow mode as a scaffold.
		environment.setSingleAgentTowMode(generation < genToStopSingleAgentTowMode);
		
		for (int i = 0; i < simulationsToRun; i++) {
			if (learningType != OnlineLearningNetType.NONE && resetLearningEachSimulation)
				resetLearning(netArray);
			
			environment.resetLayout();
			environment.runSim(timeSteps);
						
			performanceArray.add(temp_performance = (float) (environment.getFoodCollected() / foodCollectedGoal));
			foodCollectedArray.add(temp_foodCollected = environment.getFoodCollected());
			attachedToFoodArray.add(temp_attachedToFood = environment.getAttachedToFood());
			assistWithFoodArray.add(temp_assistWithFood = environment.getAssistWithFood());
			moveTowardSignalArray.add(temp_moveTowardSignal = environment.getMoveTowardSignalCount());
			moveFoodTowardGoalArray.add(temp_moveFoodTowardGoal = environment.getMoveFoodTowardGoal());
			handsFullArray.add(temp_handsFull = environment.getHandsFull());
			hitObstacleArray.add(temp_hitObstacle = environment.getHitObstacle());
			droppedFoodArray.add(temp_droppedFood = environment.getDroppedFood());
			
			// Per-simulation composite fitness: rewarded behaviours scaled to
			// [0,1], penalised behaviours inverted, averaged over 8 terms.
			temp_nonTugFitness = temp_performance
					+ temp_attachedToFood/fitnessDivisor
					+ temp_assistWithFood/fitnessDivisor
					+ temp_moveTowardSignal/fitnessDivisor
					+ temp_moveFoodTowardGoal/fitnessDivisor
					+ (1 - temp_handsFull/fitnessDivisor)
					+ (1 - temp_droppedFood/fitnessDivisor)
					+ (1 - temp_hitObstacle/fitnessDivisor);
			temp_nonTugFitness /= 8;
			improvementFactorArray.add(temp_nonTugFitness);
		}
		
		performance = (float) averageResults(performanceArray);
		foodCollected = (float) averageResults(foodCollectedArray);
		attachedToFood = (float) averageResults(attachedToFoodArray);
		assistWithFood = (float) averageResults(assistWithFoodArray);
		moveTowardSignal = (float) averageResults(moveTowardSignalArray);
		moveFoodTowardGoal = (float) averageResults(moveFoodTowardGoalArray);
		handsFull = (float) averageResults(handsFullArray);
		hitObstacle = (float) averageResults(hitObstacleArray);
		droppedFood = (float) averageResults(droppedFoodArray);
		
		// Use performance instead of food collected since it is already scaled 0-1.
		nonTugFitness = performance
			+ attachedToFood/fitnessDivisor
			+ assistWithFood/fitnessDivisor
			+ moveTowardSignal/fitnessDivisor
			+ moveFoodTowardGoal/fitnessDivisor
			+ (1 - handsFull/fitnessDivisor)
			+ (1 - droppedFood/fitnessDivisor)
			+ (1 - hitObstacle/fitnessDivisor);
		nonTugFitness /= 8;
		
		improvementFactor = calculateImprovementFactor(improvementFactorArray);
		
		HashMap<String, Float> metrics = new HashMap<String, Float>(16);
		metrics.put(ATTACHEDTOFOOD_KEY, attachedToFood);
		metrics.put(ASSISTWITHFOOD_KEY, assistWithFood);
		metrics.put(MOVEDTOWARDSIGNAL_KEY, moveTowardSignal);
		metrics.put(MOVEFOODTOWARDGOAL_KEY, moveFoodTowardGoal);
		metrics.put(FOODCOLLECTED_KEY, foodCollected);
		metrics.put(HANDSFULL_KEY, 1-(handsFull/fitnessDivisor));
		metrics.put(HITOBSTACLE_KEY, 1-(hitObstacle/fitnessDivisor));
		metrics.put(DROPPEDFOOD_KEY, 1-(droppedFood/fitnessDivisor));
		if (useImprovementFactor)
			metrics.put(IMPROVEMENTFACTOR_KEY, improvementFactor);
		
		individualMetrics.put(genotype, metrics);
		
		genotype.setPerformanceValue(performance);
		
		synchronized (staticLock) {
			// The stat-array writes must happen under the lock: subjectCount
			// is the shared slot index and evaluate() may run concurrently.
			_fitness[subjectCount] = nonTugFitness;
			_performance[subjectCount] = performance;
			_foodCollected[subjectCount] = foodCollected;
			_chromosome[subjectCount] = genotype.getId();
			_attachedToFood[subjectCount] = attachedToFood;
			_assistWithFood[subjectCount] = assistWithFood;
			_moveTowardSignal[subjectCount] = moveTowardSignal;
			_moveFoodTowardGoal[subjectCount] = moveFoodTowardGoal;
			_handsFull[subjectCount] = handsFull;
			_hitObstacle[subjectCount] = hitObstacle;
			_droppedFood[subjectCount] = droppedFood;
			_improvementFactor[subjectCount] = improvementFactor;
			
			if (nonTugFitness > fitnessRecord) {
				logger.info("** NON-TUG FIT RECORD: Chrmsm: " + genotype.getId()
						+ ", gen: " + generation + ", evalCount: "
						+ subjectCount + ", perf: " + performance + ", fit: "
						+ nonTugFitness + ", foodColl: " + foodCollected);
				fitnessRecord = nonTugFitness;
			}
			if (performance > performanceRecord) {
				logger.info("*** PERF RECORD: Chrmsm: " + genotype.getId()
						+ ", gen: " + generation + ", evalCount: "
						+ subjectCount + ", perf: " + performance + ", fit: "
						+ nonTugFitness + ", foodColl: " + foodCollected);
				performanceRecord = performance;
			}
			if (foodCollected > foodCollectedRecord) {
				logger.info("** FOOD RECORD: Chrmsm: " + genotype.getId()
						+ ", gen: " + generation + ", evalCount: "
						+ subjectCount + ", perf: " + performance + ", fit: "
						+ nonTugFitness + ", foodColl: " + foodCollected);
				foodCollectedRecord = foodCollected;
				
				// If we've surpassed the food collection threshold and haven't set the gen to stop using singleagent tow mode, do so.
				if (foodCollectedRecord >= foodCollectionThreshold && genToStopSingleAgentTowMode > generation)
					genToStopSingleAgentTowMode = generation + 1;
			}
			subjectCount++;
		}
		
		return (int) Math.round(nonTugFitness * maxFitnessValue);
	}

	/**
	 * Computes the relative improvement of the weighted average of the
	 * per-simulation fitness values over the first simulation's value.
	 * Later simulations carry higher weight (1.0, 1.1, 1.2, ...), so a
	 * positive result indicates the individual improved as it learned.
	 *
	 * @param performanceArray per-simulation fitness values, in order
	 * @return the improvement factor, clamped below at 0; 0 if the input is
	 *         empty or its first value is 0 (avoids NaN/Infinity from the
	 *         division by the baseline)
	 */
	protected float calculateImprovementFactor(ArrayList<Float> performanceArray) {
		if (performanceArray.isEmpty() || performanceArray.get(0) == 0)
			return 0;
		
		float wgt = 1.0f;
		float wgtTotal = 0;
		float improvementFactor = 0;
		
		for (int i = 0; i < performanceArray.size(); i++) {
			improvementFactor += performanceArray.get(i) * wgt;
			wgtTotal += wgt;
			wgt += 0.1f;
		}
		
		improvementFactor /= wgtTotal;
		improvementFactor = (improvementFactor - performanceArray.get(0)) / performanceArray.get(0);
		
		// Clamp below at zero; regressions do not count as negative improvement.
		improvementFactor = improvementFactor < 0 ? 0 : improvementFactor;
		
		return improvementFactor;
	}

	/**
	 * Reset weights to their values at the time the learning activator was
	 * created.
	 * 
	 * @param substrate
	 */
	private void resetLearning(NDActivatorArray substrate) {
		for (ActivatorArrayIterator net = substrate.iterator(); net.hasNext(); net.next()) {
			OnlineLearningActivatorNDBase lrnet = (OnlineLearningActivatorNDBase) net.get();
			lrnet.resetLearning();
		}
	}

	/**
	 * Returns the arithmetic mean of the given values, or 0 for an empty
	 * list (rather than NaN from a 0/0 division).
	 */
	public static double averageResults(ArrayList<Float> fitnessArray) {
		if (fitnessArray.isEmpty())
			return 0;
		double result = 0;
		for (Float err : fitnessArray) {
			result += err;
		}
		return result / fitnessArray.size();
	}

	/** @return the maximum integer fitness {@link #evaluate} can return. */
	public int getMaxFitnessValue() {
		return maxFitnessValue;
	}

	/**
	 * Batch driver: loads the run properties from the classpath, performs
	 * {@code num.runs} evolutionary runs, and writes per-metric CSV result
	 * files for each run.
	 *
	 * @param args resource names passed to {@code Properties.loadFromResources}
	 */
	public static void main(String[] args) throws Throwable {
		if (args.length < 1) {
			System.exit(-1);
		}
		
		SasasTugFF ff;
		
		// Anji Properties class... file must be in classpath 
        Properties props = new Properties();
        props.loadFromResources(args);		
		
		String runName = props.getProperty("run.name");
        String runID = runName + "-" + System.currentTimeMillis();
        String outputDir = props.getProperty("fitness_function.class") + File.separatorChar + runID + File.separatorChar;
        props.setProperty("run.id", ""+runID);
        props.setProperty("output.dir", outputDir);
        int numRuns = props.getIntProperty(NUM_RUNS_KEY);
        String baseDir = props.getProperty(BASE_DIR_KEY);
        double avgRunTime = 0;
        
        RunSetup.setup(runName, numRuns);
        
        for (int run = 1; run <= numRuns; run++) {
        	long startRun = System.currentTimeMillis();
        	
        	// Fresh properties per run; persistence goes to a per-run base dir.
        	props = new Properties();
            props.loadFromResources(args);
        	
        	props.put(BASE_DIR_KEY, baseDir + "_" + run);
            props.setProperty("run.id", ""+runID);
            props.setProperty("output.dir", outputDir);
        	
	        Evolver evolver = new Evolver();
	        evolver.init( props );
	        ff = (SasasTugFF) evolver.getFitnessFunction();
	        // Stat records are already allocated by ff.init() via evolver.init();
	        // the old `if (run > 0)` re-initialization was always-true and redundant.
	        
        	logger.info("\n\n--- START RUN: " + run + " of " + numRuns + " (" + ((run*100)/(numRuns)) + "%) ---------------------------------------\n\n");
        	
            evolver.run();
           	        
			int popSize = ff.popSize;
			float[] _fitness = ff._fitness;
			long[] _chromosome = ff._chromosome;
			float[] _foodCollected = ff._foodCollected;
			float[] _attachedToFood = ff._attachedToFood;
			float[] _assistWithFood = ff._assistWithFood;
			float[] _moveTowardSignal = ff._moveTowardSignal;
			float[] _moveFoodTowardGoal = ff._moveFoodTowardGoal;
			float[] _handsFull = ff._handsFull;
			float[] _hitObstacle = ff._hitObstacle;
			float[] _droppedFood = ff._droppedFood;
			float[] _improvementFactor = ff._improvementFactor;
	        
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/fitnessAll.csv", _fitness, _chromosome, popSize, "Fitness" );
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/foodCollectedAll.csv", _foodCollected, _chromosome, popSize, "Average Food Collected");
	        
	        // Each metric file is written from its own stat array (previously
	        // these all wrote _performance/_foodCollected by copy-paste error).
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/AttachedToFoodAll.csv", _attachedToFood, _chromosome, popSize, "AttachedToFood" );
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/AssistWithFoodAll.csv", _assistWithFood, _chromosome, popSize, "AssistWithFood");
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/MoveTowardSignalAll.csv", _moveTowardSignal, _chromosome, popSize, "MoveTowardSignal");
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/MoveFoodTowardGoalAll.csv", _moveFoodTowardGoal, _chromosome, popSize, "MoveFoodTowardGoal");
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/HandsFullAll.csv", _handsFull, _chromosome, popSize, "HandsFull");
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/HitObstacleAll.csv", _hitObstacle, _chromosome, popSize, "HitObstacle");
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/DroppedFoodAll.csv", _droppedFood, _chromosome, popSize, "DroppedFood");
	        Util.writeResultSetToFile("./runs/"+runName+"_"+run+"/improvementFactorAll.csv", _improvementFactor, _chromosome, popSize, "ImprovementFactor");

	        String[] resultsNames = new String[] {
	        		"Fitness"
	        		, "FoodCollected"
	        		, "AttachedToFood"
	        		, "AssistWithFood"
	        		, "MoveTowardSignal"
	        		, "MoveFoodTowardGoal"
	        		, "HandsFull"
	        		, "HitObstacle"
	        		, "DroppedFood"
	        		, "ImprovementFactor"
			};
	        
	        float[][] results = new float[][] {
	        		_fitness
	        		,_foodCollected
	        		,_attachedToFood
	        		,_assistWithFood
	        		,_moveTowardSignal
	        		,_moveFoodTowardGoal
	        		,_handsFull
	        		,_hitObstacle
	        		,_droppedFood
	        		,_improvementFactor
	        };
	        Util.writeAllAggregateResultsToFile("./runs/"+runName+"_"+run+"/allAggregate.csv", popSize, resultsNames, results);
	        
	        evolver.dispose();
	        
	        long duration = (System.currentTimeMillis() - startRun) / 1000;
			if (avgRunTime == 0)
				avgRunTime = duration;
			else
				avgRunTime = avgRunTime * 0.9 + duration * 0.1;	// exponential moving average
			// Runs remaining after completing run `run` is (numRuns - run),
			// not (numRuns - (run+1)) which went negative on the last run.
			int eta = (int) Math.round(avgRunTime * (numRuns - run));
			logger.info("\n--- Run finished in " + Misc.formatTimeInterval(duration) +".  ETA to complete all runs:" + Misc.formatTimeInterval(eta) + ". ------------------\n");
        }
        
        System.exit( 0 );
	}

	@Override
	public boolean endRun() {
		return super.endRun();
	}

	@Override
	public void dispose() {
		super.dispose();
	}

	/** Fitness scaling is intentionally a no-op for this fitness function. */
	@Override
	protected void scale(int scaleCount, int scaleFactor) {
	}

}