import java.util.ArrayList;
import java.util.List;
import java.util.Random;


public class NeuralNetwork {

	/** Step size for the backpropagation gradient-descent weight update. */
	private static final float LEARNING_RATE = 0.1f;

	private List<NeuronLayer> layers = new ArrayList<NeuronLayer>();
	// One row of expected outputs per training sample; filled by setTargetData().
	// Package-private: other classes in this package read it directly.
	List<List<Float>> targetData = new ArrayList<List<Float>>();

	/** Appends a layer to the end of the network (index 0 is the input layer). */
	public void addLayer(NeuronLayer layer) {
		this.layers.add(layer);
	}

	/** Returns the live (mutable) layer list — callers can and do modify it. */
	public List<NeuronLayer> getLayers() {
		return this.layers;
	}

	/**
	 * Runs one forward pass for training sample {@code num} and returns the
	 * output layer's activations. Side effects while target data is loaded:
	 * an optional backpropagation weight update first, then per-layer online
	 * learning (Kohonen / Widrow-Hoff) interleaved with the forward pass.
	 *
	 * @param num             index of the training sample in {@code targetData}
	 * @param backpropagation if true and target data exists, update all weights
	 *                        by backpropagation before the forward pass
	 * @return output values of the last layer, in neuron order
	 */
	public List<Float> compute(int num, boolean backpropagation) {
		if (backpropagation && !targetData.isEmpty()) {
			backpropagate(num);
		}
		// Forward pass; when target data is present, layers that learn online
		// are trained immediately after receiving their input signal.
		for (int i = 0; i < layers.size() - 1; i++) {
			layers.get(i + 1).handleSignal(layers.get(i));
			if (!targetData.isEmpty()) {
				switch (layers.get(i + 1).type) {
					case NeuronLayer.KOHONEN:
						((KohonenLayer) layers.get(i + 1)).learn(layers.get(i));
						break;
					case NeuronLayer.WIDROW_HOFF:
						((WidrowHoffLayer) layers.get(i + 1)).learn(targetData.get(num), layers.get(i));
						break;
					default:
						// Other layer types only propagate; no learning step.
				}
			}
		}
		List<Float> out = new ArrayList<Float>();
		for (Neuron neuron : layers.get(layers.size() - 1).getNeurons()) {
			out.add(neuron.getOutput());
		}
		return out;
	}

	/**
	 * One backpropagation pass for training sample {@code num}: refresh
	 * outputs, compute per-neuron errors from the output layer backwards,
	 * then apply the gradient-descent weight update to every non-input layer.
	 */
	private void backpropagate(int num) {
		// Forward pass so every neuron's output reflects the current weights.
		for (int i = 0; i < layers.size() - 1; i++) {
			layers.get(i + 1).handleSignal(layers.get(i));
		}
		// Output layer error: target - actual.
		NeuronLayer outputLayer = layers.get(layers.size() - 1);
		for (int i = 0; i < outputLayer.neurons.size(); i++) {
			Neuron neuron = outputLayer.neurons.get(i);
			neuron.setError(targetData.get(num).get(i) - neuron.getOutput());
		}
		// Hidden layers (input layer excluded): error is the weighted sum of
		// the downstream layer's errors over the connections from this neuron.
		for (int i = layers.size() - 2; i > 0; i--) {
			NeuronLayer layer = layers.get(i);
			NeuronLayer nextLayer = layers.get(i + 1);
			for (int j = 0; j < layer.neurons.size(); j++) {
				float error = 0.0f;
				for (int k = 0; k < nextLayer.neurons.size(); k++) {
					Neuron neuron = nextLayer.neurons.get(k);
					error += neuron.getError() * neuron.weights.get(j);
				}
				layer.neurons.get(j).setError(error);
			}
		}
		// Weight update: w += rate * error * f'(net) * upstream output.
		for (int i = 1; i < layers.size(); i++) {
			NeuronLayer layer = layers.get(i);
			NeuronLayer previousLayer = layers.get(i - 1);
			for (int j = 0; j < layer.neurons.size(); j++) {
				Neuron neuron = layer.neurons.get(j);
				for (int k = 0; k < neuron.weights.size(); k++) {
					neuron.weights.set(k, neuron.weights.get(k)
							+ LEARNING_RATE * neuron.getError() * neuron.derivative()
							* previousLayer.neurons.get(k).getOutput());
				}
			}
		}
	}

	/**
	 * Returns the largest neuron count over all layers. NOTE(review): despite
	 * the name this counts neurons, not weights — presumably because the max
	 * neurons in a layer bounds the weight count of the next layer's neurons;
	 * confirm against callers before renaming.
	 */
	public int getMaxWeightsNumber() {
		int max = 0;
		for (NeuronLayer layer : this.layers) {
			int neuronsNumber = layer.getNeurons().size();
			if (neuronsNumber > max) {
				max = neuronsNumber;
			}
		}
		return max;
	}

	/**
	 * Randomizes every weight of every neuron uniformly in [min, max).
	 *
	 * @param min inclusive lower bound
	 * @param max exclusive upper bound (must be &gt;= min for a sane range)
	 */
	public void setRandomWeights(float min, float max) {
		Random random = new Random();
		for (NeuronLayer layer : getLayers()) {
			for (Neuron neuron : layer.getNeurons()) {
				for (int i = 0; i < neuron.getWeights().size(); i++) {
					// nextFloat() is uniform in [0,1); scale and shift into [min,max).
					// (The previous |w - min| transform produced values outside the range.)
					neuron.setWeight(i, min + random.nextFloat() * (max - min));
				}
			}
		}
	}

	/**
	 * Replaces the target data with values parsed from {@code text}: one
	 * training sample per line, tab-separated float cells. Blank lines are
	 * skipped; blank or empty input simply clears the data.
	 *
	 * @param text tab/newline-delimited float table
	 * @throws NumberFormatException if a cell is not a parseable float
	 */
	public void setTargetData(String text) {
		this.targetData.clear();
		if (text.trim().isEmpty()) {
			return;
		}
		// Tolerate Windows line endings; trim cells so stray spaces don't throw.
		for (String row : text.split("\r?\n")) {
			if (!row.trim().isEmpty()) {
				List<Float> targets = new ArrayList<Float>();
				for (String val : row.split("\t")) {
					// parseFloat avoids the deprecated new Float(String) boxing ctor.
					targets.add(Float.parseFloat(val.trim()));
				}
				this.targetData.add(targets);
			}
		}
	}
}
