package digitRecognitionProblem.learnWeights;

import java.util.List;

import mlp.Mlp;
import utils.RandomGenerator;
import genetic_algorithm.Chromosome;
import genetic_algorithm.Mutation;

/**
 * A chromosome's neural network is mutated by adding a random value, drawn from the initial
 * weights range, to all input edges of <code>NUM_NEURONS</code> neurons chosen at random.
 */
public class LWmutation implements Mutation {

	public static int NUM_NEURONS = 2; // number of neurons to change while mutating a chromosome

	/**
	 * Applies mutation across the whole population: each chromosome is
	 * mutated independently with probability {@code rate}.
	 *
	 * @param chromosomes the population to mutate; each element is expected
	 *                    to be an {@link LWchromosome}
	 * @param rate        per-chromosome mutation probability, in [0, 1]
	 */
	@Override
	public void mutate(List<Chromosome> chromosomes, double rate) {
		for (Chromosome chromosome : chromosomes) {
			// roll once per chromosome against the mutation rate
			if (RandomGenerator.nextDouble() <= rate) {
				perturbRandomNeurons((LWchromosome) chromosome);
			}
		}
	}

	/**
	 * Picks {@code NUM_NEURONS} neurons at random (possibly with repetition)
	 * and shifts every one of their input weights by a random offset drawn
	 * from the network's initial weight range.
	 */
	private void perturbRandomNeurons(LWchromosome chromosome) {
		for (int i = 0; i < NUM_NEURONS; ++i) {
			// pick a random layer, then a random neuron within that layer
			int layer = RandomGenerator.nextInt(chromosome.getAllValues().size());
			int neuron = RandomGenerator.nextInt(chromosome.getValue(layer).size());

			// shift every incoming weight by a fresh random offset
			float[] incoming = chromosome.getValue(layer).getWeights(neuron);
			for (int edge = 0; edge < incoming.length; ++edge) {
				incoming[edge] += RandomGenerator.nextFloat(Mlp.INIT_WEIGHT_RANGE);
			}

			// write the perturbed weights back into the chromosome
			chromosome.getValue(layer).setWeights(neuron, incoming);
		}
	}

}
