package org.joone.engine;

import java.util.ArrayList;
import java.util.Collection;
import org.joone.inspection.implementations.BiasInspection;
import org.joone.log.*;

/**
 * The output of a linear layer neuron is the sum of the weighted input values,
 * scaled by the beta parameter. No transfer function is applied to limit the
 * output value
 * 
 * @see BiasedLinearLayer for the variant of this layer that adds a bias term
 */
public class LinearLayer extends SimpleLayer {

	private static final ILogger log = LoggerFactory.getLogger(LinearLayer.class);

	private static final long serialVersionUID = 2243109263560495304L;

	/** Scaling factor applied to every value passing through the layer; defaults to 1 (identity). */
	private double beta = 1;

	/** Creates an unnamed linear layer. */
	public LinearLayer() {
		super();
	}

	/**
	 * Creates a named linear layer.
	 *
	 * @param elemName the name assigned to this layer
	 */
	public LinearLayer(String elemName) {
		super(elemName);
	}

	/**
	 * Back-propagates the error pattern through this layer. Because
	 * {@code forward} computes {@code out = beta * in}, the derivative with
	 * respect to the input is the constant {@code beta}, so each incoming
	 * gradient is simply scaled by it.
	 *
	 * @param pattern the gradient values arriving from the following layer,
	 *                one per neuron (row)
	 */
	@Override
	public void backward(double[] pattern) {
		final int n = getRows();
		for (int x = 0; x < n; ++x) {
			// The gradient of a linear unit is its slope, i.e. beta.
			gradientOuts[x] = pattern[x] * beta;
		}
	}

	/**
	 * Propagates the input pattern forward, scaling each value by
	 * {@code beta}. No transfer function is applied to limit the output, and
	 * no bias is added; see {@code BiasedLinearLayer} for the biased variant.
	 *
	 * @param pattern the input values, one per neuron (row)
	 */
	@Override
	public void forward(double[] pattern) {
		final int n = getRows();
		for (int x = 0; x < n; ++x) {
			outs[x] = beta * pattern[x];
		}
	}

	/**
	 * Returns the value of the beta parameter
	 * 
	 * @return double - The beta parameter
	 */
	public double getBeta() {
		return beta;
	}

	/**
	 * Sets the beta value
	 * 
	 * @param newBeta double
	 */
	public void setBeta(double newBeta) {
		beta = newBeta;
	}

	/**
	 * Biases are meaningless for this layer, so the returned collection
	 * contains a single placeholder {@link BiasInspection} wrapping
	 * {@code null}. Note: this method never returns {@code null} itself.
	 *
	 * @return a one-element collection holding an empty bias inspection
	 */
	public Collection Inspections() {
		// Raw types kept deliberately: the framework declares this method
		// without generics, and changing the signature would break overriding.
		Collection col = new ArrayList();
		col.add(new BiasInspection(null));
		return col;
	}
}