package home.mutant.gauto.generative;

import java.util.ArrayList;
import java.util.List;

import jenes.chromosome.BitwiseChromosome;
import jenes.chromosome.DoubleChromosome;
import jenes.population.Individual;


/**
 * A layered, fully connected network of stochastic binary neurons used as a
 * generative model: weights are randomized (or learned via simple
 * perceptron-style updates) and samples are produced by propagating a bias-only
 * or one-hot input forward through the layers.
 *
 * <p>NOTE(review): the activation is stochastic (a Bernoulli draw from the
 * sigmoid), so repeated sampling with identical weights yields different
 * outputs by design.
 */
public class WeightsModel 
{
	/**
	 * Per-layer weight matrices; {@code weights.get(l)[in][out]} connects input
	 * neuron {@code in} of layer {@code l} to its output neuron {@code out}.
	 * With bias enabled each matrix carries one extra input row for the
	 * always-on bias unit.
	 */
	public List<Double[][]> weights = null;
	private final boolean withBias;
	/**
	 * Step size applied by the learn* methods.
	 * NOTE(review): 1000 is an unusually large step — confirm intentional.
	 */
	public Double LEARNING_RATE = 1000.;
	
	/**
	 * Builds the weight matrices for the given topology and randomizes them.
	 *
	 * @param numberNeuronsPerLayer neuron count of each layer, input side first
	 * @param withBias whether every layer gets an extra always-on bias input
	 * @throws IllegalArgumentException if fewer than one (with bias) or two
	 *         (without bias) layers are given
	 */
	public WeightsModel(List<Integer> numberNeuronsPerLayer, boolean withBias)
	{
		this.withBias = withBias;
		if (withBias)
			constructWithBias(numberNeuronsPerLayer);
		else
			constructWithoutBias(numberNeuronsPerLayer);
		randomize();
	}
	
	/**
	 * Allocates weight matrices for a biased network. A virtual layer of size 0
	 * is prepended so the first matrix has shape [0+1][n1] — i.e. the first
	 * layer is driven by the bias unit alone.
	 */
	private void constructWithBias(List<Integer> numberNeuronsPerLayer)
	{
		if (numberNeuronsPerLayer.size() < 1)
		{
			throw new IllegalArgumentException("There should be at least one layer");
		}
		
		List<Integer> cloneNumberNeurons = new ArrayList<Integer>(numberNeuronsPerLayer);
		cloneNumberNeurons.add(0, 0);
		
		weights = new ArrayList<Double[][]>();
		for (int i = 0; i < cloneNumberNeurons.size() - 1; i++)
		{
			// +1 input row for the always-on bias unit of the preceding layer.
			Double[][] layerWeights =
					new Double[cloneNumberNeurons.get(i) + 1][cloneNumberNeurons.get(i + 1)];
			weights.add(layerWeights);
		}
	}
	
	/** Allocates weight matrices for a bias-free network: [n_l][n_{l+1}] per layer. */
	private void constructWithoutBias(List<Integer> numberNeuronsPerLayer)
	{
		if (numberNeuronsPerLayer.size() < 2)
		{
			throw new IllegalArgumentException("There should be at least two layers");
		}
		
		weights = new ArrayList<Double[][]>();
		for (int i = 0; i < numberNeuronsPerLayer.size() - 1; i++)
		{
			Double[][] layerWeights =
					new Double[numberNeuronsPerLayer.get(i)][numberNeuronsPerLayer.get(i + 1)];
			weights.add(layerWeights);
		}
	}
	
	/** Fills every weight with a uniform random value in [-50, 50). */
	private void randomize()
	{
		for (int l = 0; l < weights.size(); l++)
		{
			Double[][] layer = weights.get(l);
			for (int i = 0; i < layer.length; i++)
			{
				for (int j = 0; j < layer[0].length; j++)
				{
					layer[i][j] = (Math.random() - 0.5) * 100;
				}
			}
		}
	}

	/**
	 * Generates one sample from the network. Without bias, a random input
	 * neuron is chosen as the one-hot seed.
	 *
	 * @return the (stochastic) activations of the final layer
	 */
	public Double[] generateSample()
	{
		if (withBias)
			return generateSampleBias();
		return generateSampleNoBias((int) (Math.random() * weights.get(0).length));
	}

	/**
	 * Forward pass for a biased network: starts from the bias unit alone and
	 * appends an always-on slot to every layer's output; the slot is stripped
	 * from the final result.
	 */
	private Double[] generateSampleBias()
	{
		Double[] input = new Double[1];
		input[0] = 1.;
		Double[] output = null;
		for (int l = 0; l < weights.size(); l++)
		{
			Double[][] layer = weights.get(l);
			output = new Double[layer[0].length + 1];
			for (int out = 0; out < output.length - 1; out++)
			{
				output[out] = 0.;
				for (int in = 0; in < input.length; in++)
				{
					output[out] += input[in] * layer[in][out];
				}
				output[out] = activationFunctionNeuron(output[out]);
			}
			input = output;
			input[input.length - 1] = 1.; // the always-on bias unit for the next layer
		}
		// Strip the trailing bias slot from the last layer's output.
		input = new Double[output.length - 1];
		System.arraycopy(output, 0, input, 0, input.length);
		return input;
	}
	
	/**
	 * Forward pass for a bias-free network seeded with a one-hot input.
	 *
	 * @param softMaxIndex index of the input neuron set to 1; all others are 0
	 * @return the (stochastic) activations of the final layer
	 */
	private Double[] generateSampleNoBias(int softMaxIndex)
	{
		Double[] input = new Double[weights.get(0).length];
		for (int i = 0; i < input.length; i++)
		{
			input[i] = 0.;
		}
		input[softMaxIndex] = 1.;
		Double[] output = null;
		for (int l = 0; l < weights.size(); l++)
		{
			Double[][] layer = weights.get(l);
			output = new Double[layer[0].length];
			for (int out = 0; out < output.length; out++)
			{
				output[out] = 0.;
				for (int in = 0; in < input.length; in++)
				{
					output[out] += input[in] * layer[in][out];
				}
				output[out] = activationFunctionNeuron(output[out]);
			}
			input = output;
		}
		return input;
	}
	
	/**
	 * One training step: samples from a random one-hot seed and nudges the
	 * first-layer weights towards a (biasedly) random training image.
	 *
	 * @param images training images; bytes are compared against the 0/1 sample
	 */
	public void learnStep(List<byte[]> images)
	{
		int imageIndex = pickBiasedImageIndex(images.size());
		int softMaxIndex = (int) (Math.random() * weights.get(0).length);
		Double[] output = generateSampleNoBias(softMaxIndex);
		updateFirstLayerWeights(softMaxIndex, output, images.get(imageIndex));
	}
	
	/**
	 * One training step that learns towards the training image most similar to
	 * the generated sample (as scored by {@link GenerativeFitness#coincidence}).
	 * Prints diagnostic scores to stdout.
	 */
	public void learnStep2(List<byte[]> images)
	{
		int softMaxIndex = (int) (Math.random() * weights.get(0).length);
		Double[] output = generateSampleNoBias(softMaxIndex);
		
		// Find the training image that best matches the sample.
		int max = Integer.MIN_VALUE;
		int indexMax = 0;
		System.out.println("Values");
		for (int img = 0; img < images.size(); img++)
		{
			int tmpCoincidence = GenerativeFitness.coincidence(images.get(img), output);
			System.out.print(tmpCoincidence + " ");
			if (tmpCoincidence > max)
			{
				max = tmpCoincidence;
				indexMax = img;
			}
		}
		System.out.println("");
		updateFirstLayerWeights(softMaxIndex, output, images.get(indexMax));
		System.out.println(softMaxIndex + " " + indexMax);
	}

	/**
	 * One training step that learns towards a uniformly random training image.
	 * Prints the chosen seed/image indices to stdout.
	 */
	public void learnStepRandom(List<byte[]> images)
	{
		int softMaxIndex = (int) (Math.random() * weights.get(0).length);
		Double[] output = generateSampleNoBias(softMaxIndex);
		
		int indexMax = (int) (Math.random() * images.size());

		updateFirstLayerWeights(softMaxIndex, output, images.get(indexMax));
		System.out.println(softMaxIndex + " " + indexMax);
	}

	/**
	 * Picks a random index in [0, size), biased so the last element is chosen
	 * roughly half of the time (preserves the original sampling scheme).
	 */
	private static int pickBiasedImageIndex(int size)
	{
		int index = (int) (Math.random() * size * 2);
		return (index >= size) ? size - 1 : index;
	}

	/**
	 * Perceptron-style update of the first-layer weights fed by the one-hot
	 * input {@code softMaxIndex}: each weight whose output pixel disagrees with
	 * {@code target} is pushed by {@code LEARNING_RATE} towards agreement.
	 * Assumes {@code target.length >= output.length} — same as the original.
	 */
	private void updateFirstLayerWeights(int softMaxIndex, Double[] output, byte[] target)
	{
		for (int i = 0; i < output.length; i++)
		{
			if (output[i].byteValue() != target[i])
			{
				if (output[i] == 1)
				{
					weights.get(0)[softMaxIndex][i] -= LEARNING_RATE;
				}
				else
				{
					weights.get(0)[softMaxIndex][i] += LEARNING_RATE;
				}
			}
		}
	}
	
	/**
	 * Number of genes a chromosome needs to encode a biased network with the
	 * given topology (one extra input row per layer for the bias unit, with a
	 * virtual size-0 layer prepended — mirrors {@link #constructWithBias}).
	 */
	public static int getChromosomeLength(List<Integer> numberNeuronsPerLayer)
	{
		if (numberNeuronsPerLayer.size() < 1)
		{
			throw new IllegalArgumentException("There should be at least one layer");
		}
		int noGenes = 0;
		List<Integer> cloneNumberNeurons = new ArrayList<Integer>(numberNeuronsPerLayer);
		cloneNumberNeurons.add(0, 0);
		
		for (int l = 0; l < cloneNumberNeurons.size() - 1; l++)
		{
			noGenes += (cloneNumberNeurons.get(l) + 1) * cloneNumberNeurons.get(l + 1);
		}
		return noGenes;
	}

	/** Number of genes needed to encode a bias-free network with this topology. */
	public static int getChromosomeLengthNoBias(List<Integer> numberNeuronsPerLayer)
	{
		if (numberNeuronsPerLayer.size() < 2)
		{
			throw new IllegalArgumentException("There should be at least two layers");
		}
		int noGenes = 0;
		
		for (int l = 0; l < numberNeuronsPerLayer.size() - 1; l++)
		{
			noGenes += numberNeuronsPerLayer.get(l) * numberNeuronsPerLayer.get(l + 1);
		}
		return noGenes;
	}
	
	/**
	 * Generates a sample from a biased network whose weights are read, allele by
	 * allele, from a double chromosome (layer-major, then output, then input —
	 * consistent with {@link #getChromosomeLength}).
	 */
	public static Double[] generateSample(List<Integer> numberNeuronsPerLayer, Individual<DoubleChromosome> chromosome)
	{
		if (numberNeuronsPerLayer.size() < 1)
		{
			throw new IllegalArgumentException("There should be at least one layer");
		}
		int offsetAllele = 0;
		List<Integer> cloneNumberNeurons = new ArrayList<Integer>(numberNeuronsPerLayer);
		cloneNumberNeurons.add(0, 0);
		
		Double[] input = new Double[1];
		input[0] = 1.;
		Double[] output = null;
		
		for (int l = 1; l < cloneNumberNeurons.size(); l++)
		{
			output = new Double[cloneNumberNeurons.get(l) + 1];
			for (int out = 0; out < cloneNumberNeurons.get(l); out++)
			{
				output[out] = 0.;
				for (int in = 0; in < input.length; in++)
				{
					output[out] += input[in] * chromosome.getChromosome().getValue(offsetAllele++);
				}
				output[out] = activationFunctionNeuron(output[out]);
			}
			input = output;
			input[input.length - 1] = 1.; // always-on bias slot for the next layer
		}
		// Strip the trailing bias slot from the last layer's output.
		input = new Double[output.length - 1];
		System.arraycopy(output, 0, input, 0, input.length);
		return input;
	}
	
	/**
	 * Generates a sample from a bias-free network whose weights come from a
	 * bitwise chromosome, seeded with a random one-hot input.
	 * NOTE(review): allele values are divided by 10 — presumably to rescale the
	 * integer-coded genes; confirm against the chromosome coding used.
	 */
	public static Double[] generateSampleNoBias(List<Integer> numberNeuronsPerLayer, Individual<BitwiseChromosome> chromosome)
	{
		if (numberNeuronsPerLayer.size() < 2)
		{
			throw new IllegalArgumentException("There should be at least two layers");
		}
		int offsetAllele = 0;
		
		Double[] input = new Double[numberNeuronsPerLayer.get(0)];
		for (int i = 0; i < input.length; i++)
		{
			input[i] = 0.;
		}
		input[(int) (Math.random() * input.length)] = 1.;
		Double[] output = null;
		
		for (int l = 1; l < numberNeuronsPerLayer.size(); l++)
		{
			output = new Double[numberNeuronsPerLayer.get(l)];
			for (int out = 0; out < numberNeuronsPerLayer.get(l); out++)
			{
				output[out] = 0.;
				for (int in = 0; in < input.length; in++)
				{
					Double value = (Double) chromosome.getChromosome().getValueAt(offsetAllele++);
					output[out] += (input[in] * value) / 10.;
				}
				output[out] = activationFunctionNeuron(output[out]);
			}
			input = output;
		}
		return output;
	}
	
	/**
	 * Stochastic binary activation: fires (returns 1) with probability
	 * sigmoid(totalInput), otherwise returns 0.
	 */
	public static Double activationFunctionNeuron(Double totalInput)
	{
		return (Math.random() <= 1. / (1 + Math.exp(-totalInput))) ? 1. : 0.;
	}

	/**
	 * Runs the network in reverse — input applied at the output side, layers
	 * traversed last-to-first with transposed weights, no activation — and
	 * returns the index of the strongest first-layer response.
	 * NOTE(review): for biased networks the transposed pass also multiplies
	 * through the bias rows as if they were neurons — confirm intended.
	 *
	 * @param bs the image to classify/recognize
	 * @return argmax index of the reverse pass, or -1 if the output is empty
	 */
	public int test(byte[] bs)
	{
		Double[] input = new Double[weights.get(weights.size() - 1)[0].length];
		for (int i = 0; i < input.length; i++)
		{
			input[i] = (double) bs[i]; // avoids the deprecated Double(double) constructor
		}
		Double[] output = null;
		for (int l = weights.size() - 1; l >= 0; l--)
		{
			Double[][] layer = weights.get(l);
			output = new Double[layer.length];
			for (int out = 0; out < output.length; out++)
			{
				output[out] = 0.;
				for (int in = 0; in < input.length; in++)
				{
					output[out] += input[in] * layer[out][in]; // transposed access
				}
			}
			input = output;
		}
		double max = Double.NEGATIVE_INFINITY;
		int indexMax = -1;
		for (int out = 0; out < output.length; out++)
		{
			if (output[out] > max)
			{
				max = output[out];
				indexMax = out;
			}
		}
		return indexMax;
	}
}
