package com.zelic.znn.core;

import com.zelic.znn.functions.TanhFunction;
import com.zelic.znn.functions.OutputFunction;

/**
 * @since 17/07/2011
 * @author zelic
 * Filename: Layer.java
 * Description: Network layer
 */

/**
 * Layer is used to manage all neurons at a level
 * @author zelic
 */
public class Layer {

    /** Number of units (neurons) in this layer. */
    private int numUnit;
    /** Number of inputs per unit, INCLUDING the bias input (see constructor). */
    private int numInput;
    private Layer previousLayer;
    private Layer nextLayer;
    /** Activation function used to map net sums to outputs. */
    private OutputFunction outputMethod;
    // NOTE(review): parent is stored but never read in this class; kept for
    // compatibility in case reflective or future use depends on it.
    private Network parent;
    private double learningRate;
    /** Momentum coefficient applied to the previous weight change. */
    private final double momentRate = 0.05;

    /*
     * weight corresponds to [unit][input]; column 0 holds the bias weight
     * (Util.biasArray prepends the bias term to the input vector).
     */
    private double[][] weight;
    /*
     * delta corresponds to [unit] — the backpropagated error term per unit
     */
    private double[] delta;

    /*
     * previous weight change, kept for the momentum term
     */
    private double[][] oldDeltaWeight;
    /*
     * output corresponds to [unit]
     */
    private double[] output;
    /*
     * input corresponds to [input] (bias-augmented, length numInput)
     */
    private double[] input;

    /**
     * Create a layer with a specific number of inputs and units.
     * The activation defaults to {@link TanhFunction}.
     * @param numInput : number of inputs (excluding bias; one bias slot is added internally)
     * @param numUnit : number of units
     * @param p : owning network
     * @param rate : learning rate used in {@link #updateWeight()}
     */
    public Layer(int numInput, int numUnit, Network p, double rate) {
        this.numInput = numInput + 1; //Bias is counted
        this.numUnit = numUnit;
        nextLayer = null;
        previousLayer = null;
        outputMethod = new TanhFunction();
        parent = p;
        // Nguyen-Widrow initialization gave better convergence than plain
        // uniform random weights (the commented alternative).
        //weight = Util.randomMatrix(numUnit, numInput + 1);
        weight = Util.NguyenWidrowRandomMatrix(numUnit, numInput + 1);

        delta = new double[numUnit];
        output = new double[numUnit];
        oldDeltaWeight = new double[numUnit][numInput + 1];
        learningRate = rate;
    }

    /**
     * Create a layer with an explicit activation function.
     * @param numInput : number of inputs (excluding bias)
     * @param numUnit : number of units
     * @param p : owning network
     * @param rate : learning rate
     * @param outputMethod : activation function to use instead of the default tanh
     */
    public Layer(int numInput, int numUnit, Network p, double rate, OutputFunction outputMethod) {
        this(numInput, numUnit, p, rate);
        this.outputMethod = outputMethod;
    }

    /**
     * Set input for layer. The raw input is bias-augmented before storage,
     * so the stored array has length {@code numInput}.
     * @param input : input data (without bias term)
     */
    public void setInput(double[] input) {
        this.input = Util.biasArray(input);
    }

    /**
     * Get output from layer
     * @return output : output array (one value per unit)
     */
    public double[] getOutput() {
        return output;
    }

    /**
     * Get weight from layer
     * @return weight matrix, indexed [unit][input] (column 0 is bias)
     */
    public double[][] getWeight() {
        return weight;
    }

    /**
     * Set weight matrix for layer. The matrix is deep-copied so later
     * mutation of the caller's array cannot affect this layer.
     * @param data : weight matrix, indexed [unit][input]
     */
    public void setWeight(double[][] data) {
        // FIX: data.clone() is a shallow copy for 2D arrays — the row arrays
        // would remain shared with the caller. Copy each row explicitly.
        double[][] copy = new double[data.length][];
        for (int i = 0; i < data.length; i++) {
            copy[i] = data[i].clone();
        }
        this.weight = copy;
    }

    /**
     * Get learning rate
     * @return learning rate
     */
    public double getLearningRate() {
        return learningRate;
    }

    /**
     * Get composite delta from layer.
     * Composite delta is the sum over units of delta x weight, i.e. the
     * error signal propagated back to the previous layer's units.
     * The result has the previous layer's unit count; weight column 0 (bias)
     * is skipped via the {@code i + 1} offset because the bias has no
     * corresponding unit in the previous layer.
     * @return composite delta, one value per previous-layer unit
     */
    public double[] getCompositeDelta() {

        double[] result = Util.zeroArray(previousLayer.getNumUnit());

        for (int i = 0; i < result.length; i++) {
            for (int j = 0; j < numUnit; j++) {
                result[i] += delta[j] * weight[j][i + 1];
            }
        }

        return result;
    }

    /**
     * Get number of units
     * @return number of units
     */
    public int getNumUnit() {
        return numUnit;
    }

    /**
     * Get number of inputs
     * @return number of inputs, including the bias slot
     */
    public int getNumInput() {
        return numInput;
    }

    /**
     * Set previous layer for this layer
     * @param layer : previous layer
     */
    public void setPreviousLayer(Layer layer) {
        previousLayer = layer;
    }

    /**
     * Get previous layer
     * @return previous layer, or null if this is the first layer
     */
    public Layer getPreviousLayer() {
        return previousLayer;
    }

    /**
     * Set next layer
     * @param next : next layer
     */
    public void setNextLayer(Layer next) {
        nextLayer = next;
    }

    /**
     * Get next layer
     * @return next layer, or null if this is the output layer
     */
    public Layer getNextLayer() {
        return nextLayer;
    }

    /**
     * Set output method
     * @param o : output (activation) function
     */
    public void setOutputMethod(OutputFunction o) {
        outputMethod = o;
    }

    /**
     * Return net of layer: the weighted sum of the bias-augmented input
     * for each unit (weight . input, per unit).
     * @return array of net values, one per unit
     */
    public double[] net() {
        double[] result = Util.zeroArray(numUnit);

        for (int i = 0; i < numUnit; i++) {
            for (int j = 0; j < numInput; j++) {
                result[i] = result[i] + weight[i][j] * input[j];
            }
        }

        return result;
    }

    /**
     * Calculate the output array by applying the activation function to
     * the net values. Requires {@link #setInput(double[])} first.
     */
    public void calculateOutput() {
        double[] net = net();
        output = outputMethod.calculateOutputArray(net);
    }

    /**
     * Calculate delta (backpropagated error term) for each unit.
     * For the output layer (no next layer) delta is f'(out) * (target - out);
     * for hidden layers it is f'(out) * composite delta of the next layer.
     * @param target : target output (used only when this is the output layer;
     *                 may be null for hidden layers)
     */
    public void calculateDelta(double[] target) {
        if (nextLayer == null) {
            /*
             * This is an output layer
             */
            for (int i = 0; i < numUnit; i++) {
                delta[i] = outputMethod.getDerivation(output[i]) * (target[i] - output[i]);
            }
        } else {
            /*
             * This is a hidden layer
             */
            double[] nextCompositeDelta = nextLayer.getCompositeDelta();
            for (int i = 0; i < numUnit; i++) {
                delta[i] = outputMethod.getDerivation(output[i]) * nextCompositeDelta[i];
            }
        }

    }

    /**
     * Update weights using the stored learning rate, the current delta and
     * input, plus a momentum term based on the previous weight change:
     * w += rate * delta * input + momentRate * previousChange.
     */
    public void updateWeight() {
        double[][] weightChange = new double[numUnit][numInput];
        for (int i = 0; i < numUnit; i++) {
            for (int j = 0; j < numInput; j++) {
                weightChange[i][j] = learningRate * delta[i] * input[j];
                weight[i][j] = weight[i][j] + weightChange[i][j] + momentRate * oldDeltaWeight[i][j];
            }
        }
        // weightChange is a freshly allocated local array, so it can be stored
        // directly; the previous shallow clone() shared the same rows anyway
        // and provided no isolation.
        oldDeltaWeight = weightChange;
    }
}
