/*
 * JANN - a Java toolkit for creating arbitrary Artificial Neural Networks.
 * 
 * Copyright (c) 2009 Matthijs Snel
 * 
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package net.jann;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;

/**
 * Able to represent arbitrary neural net topologies.
 * 
 * @author Matthijs Snel
 *
 */
public class NeuralNet implements INeuralNet, Serializable {

	private static final long serialVersionUID = 483997170912587710L;

	/** First and last layers of the net; hidden layers are reached by following outgoing links. */
	protected Layer inputLayer, outputLayer;

	/** Parameters shared by all neurons and links in this net. */
	protected Parameters params;

	/**
	 * Propagates an error signal backwards through the net, starting at the output layer.
	 *
	 * @param delta error vector for the output layer.
	 */
	public void backprop( double[] delta ) {
		outputLayer.backProp(delta);
	}

	/**
	 * Feeds an input vector forward through the net, starting at the input layer.
	 *
	 * @param in input vector; must match the dimensionality of the input layer.
	 * @throws IllegalArgumentException if {@code in.length != inputLayer.size()}.
	 */
	public void feedforward( double[] in ) {
		if ( in.length != inputLayer.size() )
			throw new IllegalArgumentException("Dimensionality of input vector does not equal dim. of input layer");
		inputLayer.feedForward(in);
	}

	/** @return the input layer of this net. */
	public Layer getInputLayer() {
		return inputLayer;
	}

	/** @return the current activation of the output layer. */
	public double[] getOutput() {
		return outputLayer.getActivation();
	}

	/** @return the output layer of this net. */
	public Layer getOutputLayer() {
		return outputLayer;
	}

	/** @return the parameters shared by all neurons and links in this net. */
	public Parameters getParameters() {
		return params;
	}

	/**
	 * Sets the given parameters on every neuron and link in the net by breadth-first
	 * traversal from the input layer.
	 *
	 * @param params the parameters to install throughout the net.
	 */
	public void setParameters(Parameters params) {
		this.params = params;
		// LinkedHashSet: O(1) contains() during traversal (an ArrayList here made each
		// visited-check a linear scan); insertion order is preserved as before.
		Set<Neuron> visitedNodes =
			new LinkedHashSet<Neuron>( (int) (3*( inputLayer.size() + outputLayer.size() )));

		setParamsForLayer( inputLayer.getNeurons(), params, visitedNodes );
	}

	/**
	 * Recursive helper for {@link #setParameters(Parameters)}: sets parameters on one
	 * "layer" (a set of neurons at equal traversal depth) and recurses on the set of
	 * not-yet-visited post-neurons.
	 *
	 * @param layer        neurons to process at this depth; recursion stops when empty.
	 * @param params       parameters to install.
	 * @param visitedNodes accumulator of already-processed neurons (guards against cycles
	 *                     and shared downstream neurons).
	 */
	private void setParamsForLayer( Collection<? extends Neuron> layer,
			Parameters params, Collection<Neuron> visitedNodes ) {

		if ( layer.size() == 0 )
			return;

		//No duplicate neurons + maintain order in which they are added
		Set<Neuron> nextNeuronSet = new LinkedHashSet<Neuron>( (int) (2*layer.size() / 0.75) );

		for ( Neuron n : layer ) {
			n.setParameters(params);
			visitedNodes.add(n);
			for ( Link l : n.getOutgoing() ) {
				l.setParameters(params);
				if ( !visitedNodes.contains( l.getPostNeuron() ))
					nextNeuronSet.add( l.getPostNeuron() );
			}
		}

		setParamsForLayer( nextNeuronSet, params, visitedNodes );
	}

	/**
	 * Flattens all trainable weights of the net into a list, in breadth-first traversal
	 * order from the input layer. The ordering is the exact inverse of
	 * {@link #unpackWeights(List)}, so {@code unpackWeights(packWeights())} is a no-op.
	 *
	 * @return the net's weights (neuron weights followed by their outgoing link weights,
	 *         per traversal layer).
	 */
	public List<Double> packWeights() {
		ArrayList<Double> weights = new ArrayList<Double>();
		// LinkedHashSet: O(1) contains() during traversal; see setParameters.
		Set<Neuron> visitedNodes =
			new LinkedHashSet<Neuron>( (int) (3*( inputLayer.size() + outputLayer.size() )));

		packWeightsForLayer( inputLayer.getNeurons(), weights, visitedNodes );

		return weights;
	}

	/**
	 * Convenience variant of {@link #packWeights()} returning a primitive array.
	 *
	 * @return the net's weights as a {@code double[]}.
	 */
	public double[] packWeightsArray() {
		List<Double> weights = packWeights();
		double[] result = new double[ weights.size() ];
		for ( int i = 0; i < weights.size(); i++ )
			result[i] = weights.get(i);
		return result;
	}

	/**
	 * @return a human-readable listing of all trainable weights, in
	 *         {@link #packWeights()} order.
	 */
	public String weightsToString() {
		// Was an unimplemented stub; packWeights already gathers the data.
		return packWeights().toString();
	}

	/**
	 * Recursive helper for {@link #packWeights()}: appends the weights of one traversal
	 * layer and recurses on the set of not-yet-visited post-neurons.
	 *
	 * @param layer        neurons to process at this depth; recursion stops when empty.
	 * @param weights      accumulator receiving the packed weights.
	 * @param visitedNodes accumulator of already-processed neurons.
	 */
	private void packWeightsForLayer( Collection<? extends Neuron> layer,
			List<Double> weights, Collection<Neuron> visitedNodes ) {

		if ( layer.size() == 0 )
			return;

		//No duplicate neurons + maintain order in which they are added
		Set<Neuron> nextNeuronSet = new LinkedHashSet<Neuron>( (int) (2*layer.size() / 0.75) );

		for ( Neuron n : layer ) {
			// Input neurons carry no trainable weight: unpackWeightsForLayer skips them,
			// so they must be skipped here too or pack/unpack round-trips misalign and
			// unpackWeights throws "Too many weights".
			if ( !(n instanceof InputNeuron) )
				weights.add( n.getWeight() );
			visitedNodes.add(n);
			for ( Link l : n.getOutgoing() ) {
				weights.add( l.getWeight() );
				if ( !visitedNodes.contains( l.getPostNeuron() ))
					nextNeuronSet.add( l.getPostNeuron() );
			}
		}

		packWeightsForLayer( nextNeuronSet, weights, visitedNodes );
	}

	/**
	 * Installs the given weights into the net, consuming them in the same breadth-first
	 * order produced by {@link #packWeights()}.
	 *
	 * @param weights flat weight list; must contain exactly as many entries as the net
	 *                has trainable weights.
	 * @throws IllegalArgumentException if too many weights are passed for the size of
	 *                                  the net.
	 */
	public void unpackWeights( List<Double> weights ) {
		// LinkedHashSet: O(1) contains() during traversal; see setParameters.
		Set<Neuron> visitedNodes =
			new LinkedHashSet<Neuron>( (int) (3*( inputLayer.size() + outputLayer.size() )));

		ListIterator<Double> weightsIterator = weights.listIterator();
		unpackWeightsForLayer( inputLayer.getNeurons(), weightsIterator, visitedNodes );

		if ( weightsIterator.hasNext() )
			throw new IllegalArgumentException("Too many weights passed for the size of the net:" + weights.size());
	}

	/**
	 * Convenience variant of {@link #unpackWeights(List)} taking a primitive array.
	 *
	 * @param weights flat weight array in {@link #packWeights()} order.
	 */
	public void unpackWeights( double[] weights ) {
		List<Double> weightsList = new ArrayList<Double>( weights.length );
		for ( int i = 0; i < weights.length; i++ )
			weightsList.add( weights[i] );
		unpackWeights( weightsList );
	}

	/**
	 * Recursive helper for {@link #unpackWeights(List)}: installs weights into one
	 * traversal layer and recurses on the set of not-yet-visited post-neurons.
	 *
	 * @param layer        neurons to process at this depth; recursion stops when empty.
	 * @param weights      iterator over the remaining packed weights; advanced as
	 *                     weights are consumed.
	 * @param visitedNodes accumulator of already-processed neurons.
	 */
	private void unpackWeightsForLayer( Collection<? extends Neuron> layer,
			ListIterator<Double> weights, Collection<Neuron> visitedNodes ) {

		if ( layer.size() == 0 )
			return;

		//No duplicate neurons + maintain order in which they are added
		Set<Neuron> nextNeuronSet = new LinkedHashSet<Neuron>( (int) (2*layer.size() / 0.75) );

		for ( Neuron n : layer ) {
			// Input neurons have no trainable weight; mirrored in packWeightsForLayer.
			if ( !(n instanceof InputNeuron) )
				n.setWeight(weights.next());
			visitedNodes.add(n);
			for ( Link l : n.getOutgoing() ) {
				l.setWeight( weights.next() );
				if ( !visitedNodes.contains( l.getPostNeuron() ))
					nextNeuronSet.add( l.getPostNeuron() );
			}
		}

		unpackWeightsForLayer( nextNeuronSet, weights, visitedNodes );
	}
}
