import java.util.Random;


/**
 * A three-layer feed-forward neural network trained with backpropagation,
 * with helper methods for a simulated-annealing weight search.
 *
 * @author Shibei, Simon
 */
public class Network {
	/** Weights from the input layer to the hidden layer: [inputLayerNumber][hiddenLayerNumber]. */
	double[][] inputWeight;
	/** Weights from the hidden layer to the output layer: [hiddenLayerNumber][outputLayerNumber]. */
	double[][] outputWeight;
	/** Bias of every hidden neuron. */
	double[] hiddenBias;
	/** Bias of every output neuron. */
	double[] outputBias;
	// Cached activations of the most recent forward pass; backpropagation() reads them,
	// so it must be called directly after forword().
	double[] input;
	double[] hiddenLayer;
	double[] outputLayer;
	/** Sizes of the three layers. */
	int inputLayerNumber, hiddenLayerNumber, outputLayerNumber;
	/** Step size of the gradient-descent updates. */
	double learningRate = 0.3;
	/** Per-neuron tolerance used by {@link #testTuple(double[])} to decide a hit. */
	double hitThreshold = 0.5;
	/** Number of classes; 2 means a single output neuron carries the label directly. */
	public int classNumber = 2;
	// Debug flag; not consulted inside this class (possibly read elsewhere).
	boolean isDebug = false;
	
	/** Snapshots of the weight matrices for the simulated-annealing (SA) search. */
	double[][] tempInputWeight;
	double[][] tempOutputWeight;
	
	/**
	 * Creates a fully connected three-layer network whose weights and biases
	 * are initialised uniformly at random in [-1, 1].
	 *
	 * @param inputLayerNumber  number of input neurons
	 * @param hiddenLayerNumber number of hidden neurons
	 * @param outputLayerNumber number of output neurons
	 */
	public Network(int inputLayerNumber, int hiddenLayerNumber, int outputLayerNumber){
		this.inputLayerNumber = inputLayerNumber;
		this.hiddenLayerNumber = hiddenLayerNumber;
		this.outputLayerNumber = outputLayerNumber;
		inputWeight = new double[inputLayerNumber][hiddenLayerNumber];
		outputWeight = new double[hiddenLayerNumber][outputLayerNumber];
		hiddenBias = new double[hiddenLayerNumber];
		outputBias = new double[outputLayerNumber];
		// More than two output neurons implies a one-hot multi-class encoding.
		if (outputLayerNumber > 2){
			classNumber = outputLayerNumber;
		}
		initNetwork();
	}
	
	/**
	 * Allocates the SA snapshot matrices. Kept public (and with its original
	 * spelling) for compatibility; {@link #assignWeightTemp(int)} now also
	 * allocates them lazily on the first save.
	 */
	public void initSAMAtrix(){
		tempInputWeight = new double[inputLayerNumber][hiddenLayerNumber];
		tempOutputWeight = new double[hiddenLayerNumber][outputLayerNumber];
	}
	
	/** @param l new learning rate */
	public void setLearningRate(double l){
		learningRate = l;
	}
	
	/** @param num new number of classes */
	public void setClassNumber(int num){
		classNumber = num;
	}
	
	/** Fills every weight and bias with a random value in [-1, 1]. */
	void initNetwork(){
		Random seed = new Random();
		for (int i = 0; i < inputLayerNumber; i++){
			for (int j = 0; j < hiddenLayerNumber; j++){
				inputWeight[i][j] = getRandomValue(seed);
			}
		}
		for (int i = 0; i < hiddenLayerNumber; i++){
			for (int j = 0; j < outputLayerNumber; j++){
				outputWeight[i][j] = getRandomValue(seed);
			}
		}
		for (int i = 0; i < hiddenLayerNumber; i++){
			hiddenBias[i] = getRandomValue(seed);
		}
		for (int i = 0; i < outputLayerNumber; i++){
			outputBias[i] = getRandomValue(seed);
		}
	}
	
	/**
	 * Runs a forward pass and stores the activations in {@link #hiddenLayer}
	 * and {@link #outputLayer}. (Name kept as "forword" — renaming would break
	 * existing callers.)
	 *
	 * @param input feature vector of length {@code inputLayerNumber}
	 */
	public void forword(double[] input){
		this.input = input;
		hiddenLayer = new double[hiddenLayerNumber];
		for (int i = 0; i < hiddenLayerNumber; i++){
			double sum = hiddenBias[i];
			for (int j = 0; j < inputLayerNumber; j++){
				sum += input[j] * inputWeight[j][i];
			}
			hiddenLayer[i] = getOutputFromInput(sum);
		}
		
		outputLayer = new double[outputLayerNumber];
		for (int i = 0; i < outputLayerNumber; i++){
			double sum = outputBias[i];
			for (int j = 0; j < hiddenLayerNumber; j++){
				sum += hiddenLayer[j] * outputWeight[j][i];
			}
			outputLayer[i] = getOutputFromInput(sum);
		}
	}
	
	/**
	 * Performs one gradient-descent step towards the given target vector.
	 * Must be called directly after {@link #forword(double[])} because it
	 * reads the cached activations.
	 *
	 * @param output target vector of length {@code outputLayerNumber}
	 */
	public void backpropagation(double[] output){
		// Delta of each output neuron: sigmoid'(o) * (target - o),
		// where sigmoid'(o) = o * (1 - o).
		double[] outputError = new double[outputLayerNumber];
		for (int i = 0; i < outputLayerNumber; i++){
			outputError[i] = outputLayer[i] * (1 - outputLayer[i]);
			outputError[i] *= (output[i] - outputLayer[i]);
		}
		
		// Delta of each hidden neuron. Must be computed BEFORE the output
		// weights are updated below, since it uses their current values.
		double[] hiddenError = new double[hiddenLayerNumber];
		for (int i = 0; i < hiddenLayerNumber; i++){
			hiddenError[i] = hiddenLayer[i] * (1 - hiddenLayer[i]);
			double temp = 0;
			for (int j = 0; j < outputLayerNumber; j++){
				temp += outputError[j] * outputWeight[i][j];
			}
			hiddenError[i] *= temp;
		}
		
		// Weight updates: w += rate * delta * upstream activation.
		for (int i = 0; i < hiddenLayerNumber; i++){
			for (int j = 0; j < outputLayerNumber; j++){
				outputWeight[i][j] += learningRate * outputError[j] * hiddenLayer[i];
			}
		}
		
		for (int i = 0; i < inputLayerNumber; i++){
			for (int j = 0; j < hiddenLayerNumber; j++){
				inputWeight[i][j] += learningRate * hiddenError[j] * input[i];
			}
		}
		
		// Bias updates: b += rate * delta.
		for (int i = 0; i < outputLayerNumber; i++){
			outputBias[i] += learningRate * outputError[i];
		}
		
		for (int i = 0; i < hiddenLayerNumber; i++){
			hiddenBias[i] += learningRate * hiddenError[i];
		}
	}

	/**
	 * @param seed random source
	 * @return random double uniformly drawn from [-1, 1]
	 */
	double getRandomValue(Random seed){
		return (seed.nextDouble() - 0.5) * 2;
	}
	
	/**
	 * Sigmoid activation: the transformation of each node from net input
	 * to output.
	 *
	 * @param input net input of the node
	 * @return sigmoid(input), in (0, 1)
	 */
	double getOutputFromInput(double input){
		// Math.exp is the idiomatic (and faster) form of Math.pow(Math.E, x).
		return 1.0 / (1.0 + Math.exp(-input));
	}
	
	/** Dumps all weights and biases to stdout, three decimal places each. */
	public void printNetwork(){
		System.out.println("Input Matrix");
		for (int i = 0; i < inputLayerNumber; i++){
			for (int j = 0; j < hiddenLayerNumber; j++)
			{
				System.out.printf("% .3f ", inputWeight[i][j]);
			}
			System.out.println();
		}
		
		System.out.println("Output Matrix");
		for (int i = 0; i < hiddenLayerNumber; i++){
			for (int j = 0; j < outputLayerNumber; j++)
			{
				System.out.printf("% .3f ", outputWeight[i][j]);
			}
			System.out.println();
		}
		
		System.out.println("HiddenLayer Bias");
		for (int i = 0; i < hiddenLayerNumber; i++){
			System.out.printf("% .3f ", hiddenBias[i]);
		}
		System.out.println();
		
		System.out.println("OutLayer Bias");
		for (int i = 0; i < outputLayerNumber; i++){
			System.out.printf("% .3f ", outputBias[i]);
		}
		System.out.println();
	}
	
	/**
	 * Classifies one record and checks the result against its label.
	 * Record layout: [id, label, feature...] — TODO confirm element 0 is an id;
	 * it is skipped here and element 1 is used as the class label.
	 *
	 * @param input the record to test
	 * @return true when every output neuron is within {@code hitThreshold}
	 *         of the encoded label, i.e. the classification is right
	 */
	public boolean testTuple(double[] input){
		double[] tuple = new double[input.length - 2];
		for (int j = 0; j < tuple.length; j++){
			tuple[j] = input[j + 2];
		}
		this.forword(tuple);
		double[] output = getOutputFromNumber(input[1]);
		for (int i = 0; i < outputLayerNumber; i++){
			if (Math.abs(outputLayer[i] - output[i]) > hitThreshold){
				return false;
			}
		}
		return true;
	}
	
	/**
	 * Encodes a numeric class label (0, 1, 2, ...) as a target vector that
	 * matches the structure of the network: for two classes the single output
	 * carries the label directly, otherwise the label is one-hot encoded.
	 *
	 * @param output the class label as a number
	 * @return the target array for backpropagation
	 */
	public double[] getOutputFromNumber(double output){
		double[] result;
		if (classNumber == 2){
			result = new double[1];
			result[0] = output;
		} else {
			result = new double[classNumber];
			result[(int)output] = 1;
		}

		return result;
	}
	
	/**
	 * @return a new network with the same weight matrices and biases
	 *         (deep copy; mutating the clone does not affect this network)
	 */
	@Override
	public Network clone(){
		Network copy = new Network(inputLayerNumber, hiddenLayerNumber, outputLayerNumber);
		copyMatrix(this.inputWeight, copy.inputWeight);
		copyMatrix(this.outputWeight, copy.outputWeight);
		System.arraycopy(this.hiddenBias, 0, copy.hiddenBias, 0, hiddenLayerNumber);
		System.arraycopy(this.outputBias, 0, copy.outputBias, 0, outputLayerNumber);
		return copy;
	}
	
	/**
	 * Calculates the squared error of the network on a single record, used
	 * by the SA search. Record layout: [id, label, feature...].
	 *
	 * FIX: the error is now taken against the encoded label vector from
	 * {@link #getOutputFromNumber(double)} (as {@link #testTuple(double[])}
	 * already does); the old code compared every output neuron against the
	 * raw class number, which is only valid in the binary one-output case.
	 *
	 * @param input the record whose element 1 is the class label
	 * @return sum of squared per-neuron errors
	 */
	public double getErrSquare(double[] input){
		double res = 0.0;
		double[] tuple = new double[input.length - 2];
		for (int j = 0; j < tuple.length; j++){
			tuple[j] = input[j + 2];
		}
		this.forword(tuple);
		double[] expected = getOutputFromNumber(input[1]);
		for (int i = 0; i < outputLayerNumber; i++){
			double diff = outputLayer[i] - expected[i];
			res += diff * diff;
		}
		return res;
	}
	
	/**
	 * Saves or restores the weight matrices during the SA process.
	 *
	 * @param type 0 saves the current weights into the temp matrices;
	 *             any other value restores the weights from the temp matrices
	 */
	public void assignWeightTemp(int type)
	{
		if (type == 0){
			// Save: allocate the snapshot matrices lazily so callers no longer
			// have to remember to invoke initSAMAtrix() first (old code NPE'd).
			if (tempInputWeight == null || tempOutputWeight == null){
				initSAMAtrix();
			}
			copyMatrix(outputWeight, tempOutputWeight);
			copyMatrix(inputWeight, tempInputWeight);
		} else {
			// Restore: without a previous save there is nothing to copy back
			// (restoring unallocated snapshots would zero the live weights).
			if (tempInputWeight == null || tempOutputWeight == null){
				return;
			}
			copyMatrix(tempOutputWeight, outputWeight);
			copyMatrix(tempInputWeight, inputWeight);
		}
	}
	
	/** Row-by-row copy of {@code src} into {@code dst}; equal shapes required. */
	private static void copyMatrix(double[][] src, double[][] dst){
		for (int i = 0; i < src.length; i++){
			System.arraycopy(src[i], 0, dst[i], 0, src[i].length);
		}
	}
	
	/** Randomly perturbs every weight by an offset in [-5, 5] for the SA process. */
	public void randomChangeWeight(){
		Random seed = new Random();
		for (int i = 0; i < hiddenLayerNumber; i++){
			for (int j = 0; j < outputLayerNumber; j++){
				outputWeight[i][j] = outputWeight[i][j] + getRandomValue(seed, 5);
			}
		}
		
		for (int i = 0; i < inputLayerNumber; i++){
			for (int j = 0; j < hiddenLayerNumber; j++){
				inputWeight[i][j] = inputWeight[i][j] + getRandomValue(seed, 5);
			}
		}
	}
	
	/**
	 * @param seed random source
	 * @param range half-width of the interval
	 * @return a random double number within [{@code -range}, {@code range}]
	 */
	double getRandomValue(Random seed, int range){
		return (seed.nextDouble() - 0.5) * range * 2;
	}
}
