package othello.neuralnetwork;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.Random;

public class NeuralNetwork {

    /** Number of input units: the 64 Othello board squares plus one bias unit. */
    private static final int INPUT_NODES = 65;

    /** Guards {@link #saveWeights(String)} against writing the file twice. */
    private boolean already_saved = false;

    /**
     * weights[0][i][j] is the weight from input node i to hidden node j;
     * weights[1][j][k] is the weight from hidden node j to output node k.
     */
    private double[][][] weights;
    private double[] layerIn;      // input activations; index 64 is the bias unit
    private double[] layerOut;     // output-layer activations
    private double[] layerHidden;  // hidden-layer activations
    private int hiddenNodes;
    private int outputNodes;

    /**
     * Builds a fully connected 65-hidden-output network with all weights
     * initialised uniformly at random in [-0.5, 0.5).
     *
     * @param hiddenNodesArg number of hidden-layer nodes
     * @param outputNodesArg number of output-layer nodes
     */
    public NeuralNetwork(int hiddenNodesArg, int outputNodesArg) {
        outputNodes = outputNodesArg;
        hiddenNodes = hiddenNodesArg;
        weights = new double[2][][];
        weights[0] = new double[INPUT_NODES][hiddenNodes];
        weights[1] = new double[hiddenNodes][outputNodes];

        layerIn = new double[INPUT_NODES];
        layerOut = new double[outputNodes];
        layerHidden = new double[hiddenNodes];

        Random generator = new Random();

        // Convention throughout this class: i indexes input nodes,
        // j indexes hidden nodes, k indexes output nodes.
        for (int i = 0; i < INPUT_NODES; i++) {
            for (int j = 0; j < hiddenNodes; j++) {
                weights[0][i][j] = generator.nextDouble() - 0.5;
            }
        }
        for (int j = 0; j < hiddenNodes; j++) {
            for (int k = 0; k < outputNodes; k++) {
                weights[1][j][k] = generator.nextDouble() - 0.5;
            }
        }
    }

    /** Logistic activation, delegated to a precomputed lookup table for speed. */
    private static double sigmoid(double a) {
        return LUT.sigmoid2((float) a);
    }

    /**
     * Runs one forward pass. The first 64 entries of {@code input} are copied
     * into the input layer; slot 64 is the bias unit and is forced to 1.0.
     *
     * @param input board encoding; must have at least 64 elements
     */
    public void forwardPropagate(double[] input) {
        System.arraycopy(input, 0, layerIn, 0, 64);
        layerIn[64] = 1.0; // bias unit

        // Hidden-layer activations.
        for (int j = 0; j < hiddenNodes; ++j) {
            double activation = 0.0;
            for (int i = 0; i < INPUT_NODES; ++i) {
                activation += weights[0][i][j] * layerIn[i];
            }
            layerHidden[j] = sigmoid(activation);
        }

        // Output-layer activations.
        for (int k = 0; k < outputNodes; ++k) {
            double activation = 0.0;
            for (int j = 0; j < hiddenNodes; ++j) {
                activation += weights[1][j][k] * layerHidden[j];
            }
            layerOut[k] = sigmoid(activation);
        }
    }

    /** @return the output-layer activations of the most recent forward pass */
    public double[] getOutput() {
        return layerOut;
    }

    /** @return the same array as {@link #getOutput()}; kept for caller compatibility */
    public double[] getRegressionOutput() {
        return layerOut;
    }

    /**
     * One step of online back-propagation for a single output node. Must be
     * called after {@link #forwardPropagate(double[])} so the cached layer
     * activations are current.
     *
     * <p>Unlike the previous version, no temporary weight matrices are
     * allocated: the old hidden-to-output weight is read into a local before
     * it is overwritten, which preserves the exact update order.
     *
     * @param error        target minus actual output for node {@code k}
     * @param learningRate step size
     * @param k            index of the output node being trained
     */
    public void backPropagate(double error, double learningRate, int k) {
        double outputDelta = learningRate * error * layerOut[k] * (1.0 - layerOut[k]);

        for (int j = 0; j < hiddenNodes; ++j) {
            // The hidden-layer error term must use the OLD hidden-to-output
            // weight, so capture it before updating weights[1][j][k] below.
            double weightedSum = error * weights[1][j][k];
            double hiddenDelta =
                    learningRate * weightedSum * layerHidden[j] * (1.0 - layerHidden[j]);
            for (int i = 0; i < INPUT_NODES; ++i) {
                weights[0][i][j] += hiddenDelta * layerIn[i];
            }
            weights[1][j][k] += outputDelta * layerHidden[j];
        }
    }

    /**
     * Writes all weights to {@code weights/weights_<playerName>.csv}: one CSV
     * row per input node (input-to-hidden weights), then one row per hidden
     * node (hidden-to-output weights). Only the first successful call writes;
     * subsequent calls are no-ops.
     *
     * @param playerName used to build the file name
     */
    public void saveWeights(String playerName) {
        if (already_saved) { // sometimes it wants to save twice
            return;
        }
        new File("weights").mkdir();
        // try-with-resources: the writer is closed even if a write fails.
        try (BufferedWriter out = new BufferedWriter(
                new FileWriter("weights/weights_" + playerName + ".csv"))) {
            for (int i = 0; i < INPUT_NODES; i++) {
                for (int j = 0; j < hiddenNodes; j++) {
                    // Double.toString is locale-independent and keeps full
                    // precision; the old "%f" truncated to 6 decimals and
                    // produced unparseable commas under comma-decimal locales.
                    out.write(Double.toString(weights[0][i][j]));
                    out.write(j == hiddenNodes - 1 ? '\n' : ',');
                }
            }
            for (int j = 0; j < hiddenNodes; j++) {
                for (int k = 0; k < outputNodes; k++) {
                    out.write(Double.toString(weights[1][j][k]));
                    out.write(k == outputNodes - 1 ? '\n' : ',');
                }
            }
            // Mark saved only after a successful write so a failed save
            // can be retried.
            already_saved = true;
        } catch (Exception e) {
            System.err.println("Error: " + e.getMessage());
        }
    }

    /**
     * Loads weights previously written by {@link #saveWeights(String)}. The
     * network dimensions must match those in effect when the file was saved.
     *
     * @param playerName used to build the file name
     */
    public void loadWeights(String playerName) {
        // try-with-resources: the reader is closed even if parsing fails.
        try (BufferedReader in = new BufferedReader(
                new FileReader("weights/weights_" + playerName + ".csv"))) {
            for (int i = 0; i < INPUT_NODES; i++) {
                String[] parts = in.readLine().split(",");
                for (int j = 0; j < hiddenNodes; j++) {
                    // parseDouble, not parseFloat: the weights are doubles and
                    // parsing as float silently discarded precision.
                    weights[0][i][j] = Double.parseDouble(parts[j]);
                }
            }
            for (int j = 0; j < hiddenNodes; j++) {
                String[] parts = in.readLine().split(",");
                for (int k = 0; k < outputNodes; k++) {
                    weights[1][j][k] = Double.parseDouble(parts[k]);
                }
            }
        } catch (Exception e) {
            System.err.println("Error: " + e.getMessage());
        }
    }
}
