/**
 * 
 */
package evolution;

import java.util.Random;
import java.util.Map.Entry;

import evolution.mlp.MLP;
import evolution.mlp.neurons.Hidden;
import evolution.mlp.neurons.Neuron;
import evolution.tutors.Tutor;
import evolution.view.RevoWorker;

/**
 * @author camille
 * 
 */
public class Evolution {

	/** Optional UI worker that receives progress / data / info events; may stay null. */
	private RevoWorker worker;

	/** Number of generations to run. */
	private final int N;
	/** Fraction of the weight range used as the initial mutation step size. */
	private final double initRate;
	/** Weight range of the MLP, used to derive the initial step size. */
	private final Range range;

	private final Random random;
	/** Current mutation step size (std-dev of the Gaussian weight perturbation). */
	private double sigma;

	/**
	 * @param n        number of generations the (1+1)-ES will run
	 * @param initRate fraction of the weight range used as initial step size
	 * @param range    weight range of the evolved {@link MLP}
	 */
	public Evolution(int n, double initRate, Range range) {
		N = n;
		this.initRate = initRate;
		this.range = range;
		random = new Random();
	}

	public void setWorker(RevoWorker worker) {
		this.worker = worker;
	}

	/**
	 * Sends the progress state (0-100) to the {@link RevoWorker}, if any.
	 * @param progress percentage of completed generations
	 */
	private void setProgress(int progress) {
		if (worker != null) {
			worker.firePropertyChange(RevoWorker.PROGRESS, null, progress);
		}
	}

	/**
	 * Sends a point (fitness of the fittest MLP at the given generation) to the {@link RevoWorker}.
	 * @param generation generation index, used as the x-coordinate
	 * @param data fitness value, used as the y-coordinate
	 */
	private void sendData(int generation, double data) {
		if (worker != null) {
			// NOTE(review): generation is passed as the "old value" of the event —
			// project convention of RevoWorker, kept as-is.
			worker.firePropertyChange(RevoWorker.DATA, generation, data);
		}
	}

	/**
	 * Sends an info {@link String} to be displayed in the interface console.
	 * @param info message to display
	 */
	private void sendInfo(String info) {
		if (worker != null) {
			worker.firePropertyChange(RevoWorker.INFO, null, info);
		}
	}

	/**
	 * Clones an {@link MLP} and perturbs every hidden-layer weight with Gaussian
	 * noise of standard deviation {@code sigma}. Each weight is stored twice
	 * (in the "in" map of one neuron and the "out" map of the other), so both
	 * views are updated to stay consistent.
	 * <p>
	 * Assumes hidden neurons are not wired to each other; otherwise a shared
	 * weight could be perturbed twice — TODO confirm against the MLP topology.
	 *
	 * @param mlp the network to copy; left untouched
	 * @return the mutated clone
	 */
	private MLP mutate(MLP mlp) {

		MLP mutant = mlp.clone();

		for (Hidden hidden : mutant.getHiddenLayer()) {
			// Incoming weights: update via the entry (no structural modification),
			// then mirror the new value into the source neuron's outgoing map.
			for (Entry<Neuron, Double> entry : hidden.getIn().entrySet()) {
				double w = entry.getValue() + (sigma * random.nextGaussian());
				entry.setValue(w);
				entry.getKey().getOut().put(hidden, w);
			}

			// Outgoing weights: same scheme, mirrored into the target's incoming map.
			for (Entry<Neuron, Double> entry : hidden.getOut().entrySet()) {
				double w = entry.getValue() + (sigma * random.nextGaussian());
				entry.setValue(w);
				entry.getKey().getIn().put(hidden, w);
			}
		}

		return mutant;
	}

	/**
	 * Implementation of the (1+1)-ES algorithm with step-size adaptation:
	 * on a successful mutation sigma is doubled, on a failure it is multiplied
	 * by 2^(-1/4), which makes the process stationary at the classic 1/5
	 * success rate (Rechenberg's rule).
	 *
	 * @param mlp the {@link MLP} which will be evolved
	 * @param tutor fitness evaluator; lower fitness is better
	 * @return the fittest {@link MLP} after {@code N} generations
	 * @throws IllegalArgumentException if the {@link Tutor} doesn't accept the {@link MLP}
	 */
	public MLP saes(MLP mlp, Tutor tutor) throws Exception {

		if (!tutor.accept(mlp)) {
			String message = tutor.getName() + " doesn't accept the MLP";
			sendInfo(message);
			throw new IllegalArgumentException(message);
		}

		sigma = (range.getMax() - range.getMin()) * initRate;

		MLP parent = mlp.clone();

		for (int i = 0; i < N; i++) {

			MLP child = mutate(parent);

			// Evaluate child first, then parent, matching the original call order.
			// The parent is deliberately re-evaluated every generation: the tutor
			// may be stochastic or stateful, so its fitness is not cached.
			double childFitness = tutor.fitness(child);
			double parentFitness = tutor.fitness(parent);

			if (childFitness < parentFitness) {
				// Success: adopt the child (no clone needed — child is not reused)
				// and widen the search.
				parent = child;
				sigma *= 2.0d;

				sendData(i, 1.0d - childFitness);
			}
			else {
				// Failure: shrink the step size.
				sigma *= Math.pow(2.0d, -0.25d);

				sendData(i, 1.0d - parentFitness);
			}

			setProgress((int) (100.0 * i / N));
		}

		// The loop above tops out below 100; report completion explicitly.
		setProgress(100);

		return parent;
	}
}
