package nn.networks;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

import nn.Neuron;
import nn.layers.BPInputLayer;
import nn.layers.BPLayer;
import nn.layers.InputLayer;
import nn.layers.Layer;
import nn.phraseparameters.BPPhraseParameters;

public class BPNetwork {

	// Layers of the network; index 0 is the input layer (a BPInputLayer after
	// readConfFile() runs), the last entry is the output layer.
	protected ArrayList<BPLayer> layerList = new ArrayList<>();

	// Back-propagation hyper-parameters for the current training phase;
	// updated by epochLearn() as epoch thresholds are crossed.
	float learnSpeed;
	float momentum;
	// When true, epochLearn() logs the epoch number every 1000 epochs.
	boolean printEpochs = true;

	/**
	 * Trains the network for the configured number of epochs, switching the
	 * learning parameters (learn speed, momentum) between training "phrases"
	 * (phases) as the epoch thresholds in {@code phraseParList} are crossed.
	 * The last entry's {@code epochThred} is the total epoch count.
	 *
	 * Generalized from a hard-coded 4-phase if/else chain: any number of
	 * phases is now supported; behavior is identical for up to 4 phases.
	 *
	 * @param phraseParList per-phase parameters, ordered by increasing
	 *            {@code epochThred}; must be non-empty
	 * @param inputsList training input vectors
	 * @param outputsList expected output vectors, parallel to inputsList
	 * @throws Exception propagated from learn()
	 */
	public void epochLearn(ArrayList<BPPhraseParameters> phraseParList, ArrayList<float[]> inputsList,
			ArrayList<float[]> outputsList) throws Exception {

		int phrase = -1;
		int totalEpochs = phraseParList.get(phraseParList.size() - 1).epochThred;
		for (int epoch = 0; epoch < totalEpochs; ++epoch) {
			if (epoch % 1000 == 0 && printEpochs) {
				System.out.println(epoch + ", ");
			}
			// select the first phase whose threshold the epoch has not reached
			for (int p = 0; p < phraseParList.size(); ++p) {
				if (epoch < phraseParList.get(p).epochThred) {
					if (phrase != p) {
						System.out.println("Phrase: " + p);
						phrase = p;
					}
					learnSpeed = phraseParList.get(p).learnSpeed;
					momentum = phraseParList.get(p).momentum;
					break;
				}
			}
			learn(inputsList, outputsList, epoch);
		}
	}

	/**
	 * Runs one training epoch: for every input/output pair performs a forward
	 * pass, back-propagates the error, and updates the weights of every
	 * non-input layer, using the current learnSpeed/momentum fields.
	 *
	 * @param inputs training input vectors
	 * @param outputs expected output vectors, parallel to inputs
	 * @param epoch current epoch number (not used in the body)
	 * @throws Exception propagated from layer operations
	 */
	private void learn(ArrayList<float[]> inputs, ArrayList<float[]> outputs, int epoch) throws Exception {
		BPLayer prevLayer;
		// initial value is never read; reassigned before first use below
		BPLayer layer = layerList.get(1);
		BPLayer nextlayer;
		for (int inputIntIt = 0; inputIntIt < inputs.size(); ++inputIntIt) {
			float[] inputTabIt = inputs.get(inputIntIt);
			// TODO normalize?
			// inputTabIt = normalize(inputTabIt);
			float[] result = inputTabIt;
			// forward pass. NOTE(review): starts at index 0 (the input layer),
			// while calculate() starts at 1 — presumably BPInputLayer.getResults
			// is a pass-through that records the raw values; confirm.
			for (int it = 0; it < layerList.size(); ++it) {
				// saves raw and calculated values in all neurons for later
				// usage
				result = layerList.get(it).getResults(result);
			}
			// output layer: error from the target vector, then delta from the
			// previous layer's cached activations
			prevLayer = layerList.get(layerList.size() - 2);
			layer = layerList.get(layerList.size() - 1);
			layer.setErrorAsLastLayer(outputs.get(inputIntIt));
			layer.celculateDelta(learnSpeed, momentum, prevLayer.getResults());

			// main loop over all layers
			// start from one before last layer, ends at layer 1
			for (int it = layerList.size() - 2; it > 0; --it) {
				prevLayer = layerList.get(it - 1);
				layer = layerList.get(it);
				nextlayer = layerList.get(it + 1);
				float[] nextLayerError = nextlayer.getError();
				// weights on this layer's outgoing connections, one array per neuron
				List<float[]> revertedWeights = createRevertedWeights(layer, nextlayer);
				layer.setError(nextLayerError, revertedWeights);
				layer.celculateDelta(learnSpeed, momentum, prevLayer.getResults());
			}
			// update weights (in ALL layers)
			for (int it = 1; it < layerList.size(); ++it) {
				layerList.get(it).updateNeuronWeights();
			}
			// TODO remove
			// printNetwork();
		}
	}

	/**
	 * Performs a forward pass through the network.
	 *
	 * @param input activation vector for the input layer; its length must
	 *            equal the input layer size
	 * @return the output layer activations
	 * @throws Exception if the input length does not match the input layer
	 *             size
	 */
	public float[] calculate(float[] input) throws Exception {
		int expectedSize = layerList.get(0).size;
		if (expectedSize != input.length) {
			throw new Exception("input size not match network input, " + input.length + " instead of "
					+ expectedSize);
		}
		// propagate activations through every layer after the input layer
		float[] signal = input;
		for (BPLayer current : layerList.subList(1, layerList.size())) {
			signal = current.getResults(signal);
		}
		return signal;
	}

	/**
	 * Dumps the network structure to stdout: layer count, each layer's size,
	 * and for every non-input neuron its weights and bias.
	 *
	 * Fixes: the original built the weight string with a leading ", " and
	 * stripped it with substring(2), which threw
	 * StringIndexOutOfBoundsException for a neuron with zero weights; it also
	 * concatenated strings in a loop. Output is unchanged for neurons with at
	 * least one weight.
	 */
	public void printNetwork() {
		int count = 0;
		System.out.println("layer count: " + layerList.size());
		for (Layer l : layerList) {
			System.out.println("layer: " + count + ", size: " + l.size);
			if (l instanceof InputLayer) {
				System.out.println("\tinput layer");
			} else {
				for (Neuron n : l.getNeuronList()) {
					float[] w = n.getWeights();
					StringBuilder s = new StringBuilder();
					for (int i = 0; i < w.length; ++i) {
						if (i > 0) {
							s.append(", ");
						}
						s.append(w[i]);
					}
					System.out.println("\t" + s + ", bias:" + n.getBias());
				}
			}
			++count;
		}
	}

	/**
	 * Reads the layer configuration from a file. The first n lines describe
	 * the layers: each line contains the layer's neuron count and, optionally,
	 * the name of its activation function (names are defined in
	 * ActivationFunctionFactory). Lines starting with "#" and blank lines are
	 * skipped.
	 *
	 * After the layer specification the file must contain a line with the key
	 * word "END". Each following line contains the weights of one neuron, per
	 * layer (input layer excluded). E.g. if the network has an input layer and
	 * two other layers, first with 2 neurons and second with 4 neurons, the
	 * file must contain 6 lines after "END": two with &lt;input_size&gt; float
	 * weights and four with two float weights. Optionally a bias can be given
	 * as an additional trailing number on the line.
	 *
	 * ALTERNATIVELY, after END:
	 * AUTO - all neurons get random 0-1 weights and biases
	 * AUTO x y - all neurons get random x-y weights and biases
	 * r/R x y - this layer's neurons get x-y random weights (WARNING: this
	 * notation must then be used for ALL neurons in this layer)
	 * r/R x y b - like above, plus bias equal to b
	 * r/R x y b1 b2 - like above, bias random in range b1-b2
	 *
	 * @param path
	 *            path to file
	 * @throws IOException
	 *             Signals that an I/O exception has occurred or the file does
	 *             not have the proper format.
	 */
	// TODO - min 1 layer
	public void readConfFile(String path) throws IOException {
		BPLayer layer;
		int lineCount = 1;
		// try-with-resources closes the file even when a parse error is
		// thrown; the original code leaked the stream on every error path
		// (its catch block only rethrew without closing).
		try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(path)))) {
			String line = br.readLine();
			// reading layers parameters (size, bias activation function)
			while (line != null) {
				// skip comments and blank lines
				if (line.startsWith("#") || line.equals("")) {
					line = br.readLine();
					++lineCount;
					continue;
				}
				if (line.contains("END")) {
					break;
				}
				String[] lineArray = line.split("\\s+");
				switch (lineArray.length) {
				case 1:
					// neuron count only - default activation function
					layer = new BPLayer(Integer.parseInt(lineArray[0]));
					break;
				case 2:
					// neuron count + activation function name
					layer = new BPLayer(Integer.parseInt(lineArray[0]), lineArray[1]);
					break;
				case 0:
					line = br.readLine();
					++lineCount;
					continue;
				default:
					throw new IOException("illegal arguments or to many arguments in line:" + lineCount);
				}
				layerList.add(layer);
				line = br.readLine();
				++lineCount;
			}

			if (line == null) {
				throw new IOException("no \"END\" line");
			}
			// the first declared layer is replaced by a pass-through input layer
			Layer firstLayer = layerList.remove(0);
			layerList.add(0, new BPInputLayer(firstLayer));

			line = br.readLine();
			++lineCount;
			int layerCounter = 1;
			// skip blank lines after END. NOTE(review): like the original,
			// this throws NullPointerException if the file ends here.
			while (line.split("\\s+").length == 0 || line.equals("")) {
				line = br.readLine();
				++lineCount;
			}
			if (line.startsWith("AUTO")) {
				// AUTO [a b]: random weights and biases for every neuron.
				// The defaults (min=1, max=0) still yield values in (0, 1]:
				// 1 + r*(0-1) = 1 - r for r in [0, 1).
				float min, max;
				max = 0;
				min = 1;
				String[] lineArray = line.split("\\s+");
				if (lineArray.length == 3) {
					max = Float.parseFloat(lineArray[1]);
					min = Float.parseFloat(lineArray[2]);
				}
				while (layerCounter != layerList.size()) {
					int layerSize = layerList.get(layerCounter).size;
					int prevLayerSize = layerList.get(layerCounter - 1).size;
					for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
						float[] weightsArray = new float[prevLayerSize];
						for (int it = 0; it < prevLayerSize; ++it) {
							weightsArray[it] = (float) (min + (Math.random() * (max - min)));
						}
						layerList.get(layerCounter).initNeuronWeights(neuronIt, weightsArray);
						layerList.get(layerCounter).setNeuronBias(neuronIt,
								(float) (min + (Math.random() * (max - min))));
					}
					++layerCounter;
				}
			} else {
				// reading neurons weights, one layer after another
				while (line != null) {
					if (line.startsWith("#")) {
						line = br.readLine();
						++lineCount;
						continue;
					}
					if (layerCounter > layerList.size()) {
						throw new IOException("to many weights lines, instead of " + layerList.size());
					}
					if (line.startsWith("R") || line.startsWith("r")) {
						// r/R a b [bias | biasMin biasMax]: random weights for
						// the WHOLE current layer
						String[] lineArray = line.split("\\s+");
						if (3 != lineArray.length && 4 != lineArray.length && 5 != lineArray.length) {
							throw new IOException("in line " + lineCount + ", should be 2 or 3 values (r -> random)");
						}
						int layerSize = layerList.get(layerCounter).size;
						int prevLayerSize = layerList.get(layerCounter - 1).size;
						for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
							float[] weightsArray = new float[prevLayerSize];
							float min, max;
							max = Float.parseFloat(lineArray[1]);
							min = Float.parseFloat(lineArray[2]);
							for (int it = 0; it < prevLayerSize; ++it) {
								weightsArray[it] = (float) (min + (Math.random() * (max - min)));
							}
							layerList.get(layerCounter).initNeuronWeights(neuronIt, weightsArray);
							if (4 == lineArray.length) {
								// fixed bias
								layerList.get(layerCounter).setNeuronBias(neuronIt, Float.parseFloat(lineArray[3]));
							}
							if (5 == lineArray.length) {
								// random bias in the given range
								max = Float.parseFloat(lineArray[3]);
								min = Float.parseFloat(lineArray[4]);
								layerList.get(layerCounter).setNeuronBias(neuronIt,
										(float) (min + (Math.random() * (max - min))));
							}
							++lineCount;
							line = br.readLine();
						}
						++lineCount;
						++layerCounter;
						continue;
					}
					if (line.split("\\s+").length == 0 || line.equals("")) {
						line = br.readLine();
						++lineCount;
						continue;
					}
					int layerSize = layerList.get(layerCounter).size;
					int prevLayerSize = layerList.get(layerCounter - 1).size;
					// explicit weights: one line per neuron of the current layer
					for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
						String[] lineArray = line.split("\\s+");
						if (prevLayerSize != lineArray.length && (prevLayerSize + 1) != lineArray.length) {
							throw new IOException("number of weights (" + lineArray.length + ")in line " + lineCount
									+ " do not correspond to layer " + (layerCounter - 1) + " size (" + prevLayerSize
									+ "), layer " + layerCounter + " neurons should have " + prevLayerSize + " weights");
						}
						float[] weightsArray = new float[prevLayerSize];
						for (int it = 0; it < prevLayerSize; ++it) {
							weightsArray[it] = Float.parseFloat(lineArray[it]);
						}
						layerList.get(layerCounter).initNeuronWeights(neuronIt, weightsArray);
						if ((prevLayerSize + 1) == lineArray.length) {
							// trailing extra value on the line is the bias
							layerList.get(layerCounter).setNeuronBias(neuronIt,
									Float.parseFloat(lineArray[prevLayerSize]));
						}
						++lineCount;
						line = br.readLine();
					}
					++layerCounter;
				}
				if (layerCounter != layerList.size()) {
					throw new IOException("to few weights lines " + layerCounter + " instead of " + layerList.size());
				}
			}
		}
	}

	/**
	 * Creates the "reverted" (transposed) weight view: for each neuron of
	 * {@code layer}, the array of weights sitting on its outgoing connections
	 * into {@code nextlayer}.
	 *
	 * @param layer
	 *            the layer whose outgoing weights are collected
	 * @param nextlayer
	 *            the following layer (if layer is one before last, this is
	 *            the output layer)
	 * @return one float[] per neuron of {@code layer}, each of length
	 *         {@code nextlayer.size}
	 */
	private List<float[]> createRevertedWeights(BPLayer layer, BPLayer nextlayer) {
		int outgoingCount = nextlayer.size;
		List<float[]> reverted = new ArrayList<>();
		for (int i = 0; i < layer.size; ++i) {
			reverted.add(new float[outgoingCount]);
		}
		ArrayList<Neuron> targets = nextlayer.getNeuronList();

		// transpose: incoming weight j of target neuron i becomes
		// outgoing weight i of source neuron j
		for (int target = 0; target < outgoingCount; ++target) {
			float[] incoming = targets.get(target).getWeights();
			for (int source = 0; source < incoming.length; ++source) {
				reverted.get(source)[target] = incoming[source];
			}
		}
		return reverted;
	}

	/**
	 * Scales the input so that its components sum to 1. When the components
	 * sum to zero the values are returned unchanged (divisor forced to 1) to
	 * avoid division by zero.
	 *
	 * @param input vector to normalize; not modified
	 * @return a new normalized vector of the same length
	 */
	private float[] normalize(float[] input) {
		float total = 0;
		for (float value : input) {
			total += value;
		}
		float divisor = (total == 0) ? 1 : total;
		float[] scaled = new float[input.length];
		for (int i = 0; i < input.length; ++i) {
			scaled[i] = input[i] / divisor;
		}
		return scaled;
	}
}
