package nn.networks;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.util.ArrayList;

import nn.Neuron;
import nn.layers.InputLayer;
import nn.layers.Layer;
import nn.phraseparameters.KohPhraseParameters;

import org.apache.commons.lang3.tuple.Pair;

/**
 * A Kohonen self-organizing-map network: an input layer plus one competitive
 * layer whose neurons are pulled toward presented input vectors
 * (winner-take-most with a neighborhood).
 *
 * <p>{@link #main(String[])} is a demo that trains the map on four 3x3 bitmap
 * patterns and prints the layer weights before and after learning.</p>
 */
public class KohonenNetwork {

	/** Ordered layers of the network; index 0 is the input layer. */
	protected ArrayList<Layer> layerList = new ArrayList<>();

	/** Current Kohonen learning rate; updated per training phase in {@link #epochLearn}. */
	protected float kohLearnSpeed = 0.2f;

	/**
	 * When true, a winner's neighborhood is recomputed from weight-space
	 * distances ({@link #findNewNeighborhood}) instead of the fixed layer
	 * topology.
	 */
	boolean weightNeigh = false;

	/**
	 * Demo entry point: loads the built-in configuration, prints the network's
	 * responses to four 3x3 patterns, trains it in four phases, and prints the
	 * normalized responses again.
	 */
	public static void main(String[] args) throws Exception {
		System.out.println("start");
		boolean printNetwork = true;
		boolean printEpochs = false;

		KohonenNetwork net = new KohonenNetwork();
		String path = "configurations/KOH1";
		net.readConfFile2(path);
		net.layerList.get(1).setNeighborhoodSize(1);
		if (net.weightNeigh) {
			for (Layer lay : net.layerList) {
				lay.setNeighbors(net.layerList.get(1).getNeighborhoodSize());
			}
		}

		// 3x3 bitmaps, row-major: backslash, pipe, plus, filled ring ("@").
		float[] input1 = { 1, 0, 0, 0, 1, 0, 0, 0, 1 };
		float[] input2 = { 0, 1, 0, 0, 1, 0, 0, 1, 0 };
		float[] input3 = { 0, 1, 0, 1, 1, 1, 0, 1, 0 };
		float[] input4 = { 1, 1, 1, 1, 0, 1, 1, 1, 1 };
		ArrayList<float[]> inputsList = new ArrayList<>();
		inputsList.add(input1);
		inputsList.add(input2);
		inputsList.add(input3);
		inputsList.add(input4);

		// Training phases: (epoch threshold, neighborhood size, learning rate).
		// Neighborhood codes: -1 = 1D, -2 = diamond.
		ArrayList<KohPhraseParameters> phraseParList = new ArrayList<>();
		phraseParList.add(new KohPhraseParameters(8000, 1, 0.6f));
		phraseParList.add(new KohPhraseParameters(16000, -2, 0.03f));
		phraseParList.add(new KohPhraseParameters(24000, -1, 0.015f));
		phraseParList.add(new KohPhraseParameters(32000, 0, 0.0075f));

		// Raw responses before learning.
		int iter = 1;
		for (float[] it : inputsList) {
			float[] result = net.calculate(it);
			StringBuilder sRes = new StringBuilder();
			for (float r : result) {
				sRes.append(", ").append(r);
			}
			System.out.println(rowLabel(iter) + " ->" + sRes);
			++iter;
		}

		if (printNetwork) {
			KohonenNetwork.printNetwork(net);
		}

		net.epochLearn(phraseParList, printEpochs, inputsList);

		if (printNetwork) {
			KohonenNetwork.printNetwork(net);
		}

		// Responses after learning, rescaled to [0, 100] per pattern.
		System.out.println();
		iter = 1;
		for (float[] it : inputsList) {
			float[] result = net.calculate(it);
			float min = result[0];
			float max = result[0];
			for (float r : result) {
				min = Math.min(min, r);
				max = Math.max(max, r);
			}
			float range = max - min;
			StringBuilder sRes = new StringBuilder();
			for (float r : result) {
				// Guard against a flat response vector (range == 0): the
				// original divided by zero here, yielding Integer.MAX_VALUE.
				sRes.append('\t').append(range == 0 ? 0 : (int) (r * 100 / range));
			}
			System.out.println(rowLabel(iter) + " ->" + sRes);
			++iter;
		}
	}

	/**
	 * Console label for the demo's i-th input pattern (1-based).
	 *
	 * @param iter 1-based pattern index
	 * @return "\\" for 1, "|" for 2, "+" for 3, "@" otherwise
	 */
	private static String rowLabel(int iter) {
		switch (iter) {
		case 1:
			return "\\";
		case 2:
			return "|";
		case 3:
			return "+";
		default:
			return "@";
		}
	}

	/**
	 * Runs phased training: for each epoch, selects the first phase whose
	 * epoch threshold has not yet been reached, applies that phase's
	 * neighborhood size (only on phase change) and learning rate, then
	 * performs one {@link #learn} pass over all inputs.
	 *
	 * <p>Generalized from the original hard-coded four-phase chain: any number
	 * of phases (in ascending threshold order) is accepted; behavior for a
	 * four-phase list is unchanged.</p>
	 *
	 * @param phraseParList training phases, ascending by {@code epochThred};
	 *            the last threshold is the total epoch count
	 * @param printEpochs if true, log every 1000th epoch
	 * @param inputsList training vectors
	 * @throws Exception propagated from {@link #learn}
	 */
	protected void epochLearn(ArrayList<KohPhraseParameters> phraseParList, boolean printEpochs,
			ArrayList<float[]> inputsList) throws Exception {
		int phrase = -1;
		int totalEpochs = phraseParList.get(phraseParList.size() - 1).epochThred;
		for (int epoch = 0; epoch < totalEpochs; ++epoch) {
			if (printEpochs && epoch % 1000 == 0) {
				System.out.println(epoch + ", ");
			}
			for (int p = 0; p < phraseParList.size(); ++p) {
				if (epoch < phraseParList.get(p).epochThred) {
					if (phrase != p) {
						// Neighborhood size is only pushed to the layer when
						// the phase actually changes, as in the original.
						layerList.get(1).setNeighborhoodSize(phraseParList.get(p).neighCount);
						phrase = p;
					}
					kohLearnSpeed = phraseParList.get(p).kohLearnSpeed;
					break;
				}
			}
			learn(inputsList);
		}
	}

	/**
	 * Prints every layer's size and, for non-input layers, each neuron's
	 * weights and bias.
	 *
	 * @param net the network to dump to stdout
	 */
	public static void printNetwork(KohonenNetwork net) {
		int count = 0;
		for (Layer l : net.layerList) {
			System.out.println("layer: " + count + ", size: " + l.size);
			if (l instanceof InputLayer) {
				System.out.println("\tinput layer");
			} else {
				for (Neuron n : l.getNeuronList()) {
					float[] w = n.getWeights();
					// Join weights with ", "; unlike the original
					// substring(2) trick this does not throw for a neuron
					// with zero weights.
					StringBuilder s = new StringBuilder();
					for (int i = 0; i < w.length; ++i) {
						if (i > 0) {
							s.append(", ");
						}
						s.append(w[i]);
					}
					System.out.println("\t" + s + ", bias:" + n.getBias());
				}
			}
			++count;
		}
	}

	/**
	 * Feeds the input vector through all non-input layers.
	 *
	 * @param input vector whose length must equal the input layer size
	 * @return the last layer's output vector
	 * @throws Exception if the input length does not match the input layer
	 */
	public float[] calculate(float[] input) throws Exception {
		if (input.length != layerList.get(0).size) {
			throw new Exception("input size not match network input, " + input.length + " instead of "
					+ layerList.get(0).size);
		}
		float[] result = input;
		for (int it = 1; it < layerList.size(); ++it) {
			result = layerList.get(it).getResults(result);
		}
		return result;
	}

	/**
	 * One Kohonen learning pass: for every input vector, finds the closest
	 * neuron of layer 1 (the map layer) and moves it and its neighborhood
	 * toward that input.
	 *
	 * @param inputs the training vectors
	 * @throws Exception propagated from layer access
	 */
	public void learn(ArrayList<float[]> inputs) throws Exception {
		Layer layer = layerList.get(1);
		for (float[] inputIt : inputs) {
			int closest = getClosestNeuron(inputIt);
			moveNeurons(layer.getNeuronList().get(closest), inputIt);
		}
	}

	/**
	 * Moves the winning neuron fully (scaled by {@code kohLearnSpeed}) and
	 * each neighbor partially (additionally scaled by the neighbor's stored
	 * distance factor) toward the input vector.
	 *
	 * @param neuron the winning neuron
	 * @param input the presented input vector
	 */
	protected void moveNeurons(Neuron neuron, float[] input) {
		int size = input.length;
		float[] newWeights = new float[size];
		float[] oldWeights = neuron.getWeights();
		for (int i = 0; i < size; ++i) {
			// Positive sign because the delta is (input - old).
			newWeights[i] = oldWeights[i] + (kohLearnSpeed * (input[i] - oldWeights[i]));
		}
		neuron.setWeights(newWeights);
		if (weightNeigh) {
			findNewNeighborhood(neuron);
		}
		for (Pair<Neuron, Float> neighbour : neuron.getNeighbours()) {
			newWeights = new float[size];
			oldWeights = neighbour.getLeft().getWeights();
			for (int i = 0; i < size; ++i) {
				newWeights[i] = oldWeights[i] + (kohLearnSpeed * neighbour.getRight() * (input[i] - oldWeights[i]));
			}
			neighbour.getLeft().setWeights(newWeights);
		}
	}

	/**
	 * Recomputes the neuron's neighborhood as the k nearest neurons of layer 1
	 * in weight space, where k is the layer's neighborhood size. Seeds the
	 * candidate list with the first k+1 neurons (skipping the neuron itself),
	 * then for each remaining neuron replaces the current closest candidate
	 * when a nearer one is found.
	 *
	 * <p>NOTE(review): the replacement test uses {@link #getSmallest}, i.e. it
	 * evicts the candidate with the SMALLEST distance when a closer neuron
	 * appears — preserved as-is, but worth confirming against the intended
	 * "keep the k nearest" semantics.</p>
	 *
	 * @param neuron the neuron whose neighborhood is rebuilt
	 */
	private void findNewNeighborhood(Neuron neuron) {
		Layer layer = layerList.get(1);
		int it = 0;
		ArrayList<Neuron> nList = layer.getNeuronList();
		ArrayList<Pair<Neuron, Float>> newNeigh = new ArrayList<>();
		Neuron tmpNeuron;
		while (it < nList.size() && it <= layer.getNeighborhoodSize()) {
			tmpNeuron = nList.get(it);
			if (tmpNeuron == neuron) {
				++it;
				continue;
			}
			newNeigh.add(Pair.of(tmpNeuron, inputNeuronDistance(neuron.getWeights(), tmpNeuron.getWeights())));
			++it;
		}
		for (; it < nList.size(); ++it) {
			tmpNeuron = nList.get(it);
			float tmpDist = inputNeuronDistance(neuron.getWeights(), tmpNeuron.getWeights());
			int smallest = getSmallest(newNeigh);
			if (tmpDist < newNeigh.get(smallest).getRight()) {
				newNeigh.remove(smallest);
				newNeigh.add(Pair.of(tmpNeuron, tmpDist));
			}
		}
		neuron.setNeighbours(newNeigh);
	}

	/**
	 * Index of the pair with the smallest distance value in the list.
	 *
	 * @param list non-empty list of (neuron, distance) pairs
	 * @return index of the minimal distance entry
	 */
	private int getSmallest(ArrayList<Pair<Neuron, Float>> list) {
		float min = list.get(0).getRight();
		int minID = 0;
		for (int i = 1; i < list.size(); ++i) {
			if (list.get(i).getRight() < min) {
				min = list.get(i).getRight();
				minID = i;
			}
		}
		return minID;
	}

	/**
	 * Index of the layer-1 neuron whose weight vector is closest (Euclidean)
	 * to the given input.
	 *
	 * @param input the input vector
	 * @return index of the winning neuron in layer 1
	 */
	protected int getClosestNeuron(float[] input) {
		ArrayList<Neuron> neurons = layerList.get(1).getNeuronList();
		int closest = 0;
		float minDist = inputNeuronDistance(input, neurons.get(0).getWeights());
		for (int nIt = 1; nIt < neurons.size(); ++nIt) {
			float tmpDist = inputNeuronDistance(input, neurons.get(nIt).getWeights());
			if (tmpDist < minDist) {
				closest = nIt;
				minDist = tmpDist;
			}
		}
		return closest;
	}

	/**
	 * Euclidean distance between two equal-length vectors.
	 *
	 * @param v1 the first vector
	 * @param v2 the second vector
	 * @return sqrt of the summed squared component differences
	 * @throws RuntimeException if the vectors differ in length
	 */
	private float inputNeuronDistance(float[] v1, float[] v2) {
		if (v1.length != v2.length) {
			throw new RuntimeException("different vector sizes");
		}
		float sum = 0;
		for (int i = 0; i < v1.length; ++i) {
			// Euclidean metric.
			sum += (v1[i] - v2[i]) * (v1[i] - v2[i]);
		}
		return (float) Math.sqrt(sum);
	}

	/**
	 * Gaussian-style neighborhood attenuation based on topological distance
	 * assigned at layer creation: exp(-dist^2).
	 *
	 * @param dist how far the neighbor is from the winning neuron
	 * @return attenuation factor in (0, 1]
	 */
	public static float neuronsDistance(int dist) {
		return (float) Math.exp(-dist * dist);
	}

	/** Float overload of {@link #neuronsDistance(int)}: exp(-dist^2). */
	public static float neuronsDistance(float dist) {
		return (float) Math.exp(-dist * dist);
	}

	/**
	 * Reads the layer configuration from a file.
	 *
	 * <p>Each leading line declares one layer as "&lt;size&gt;
	 * [activationFunction]"; activation function names are the ones accepted
	 * by ActivationFunctionFactory. The declarations end with a line
	 * containing the keyword "END". After that, every non-input-layer neuron
	 * gets one line with its weights (one float per weight of the previous
	 * layer's size), optionally followed by a trailing bias value. E.g. for an
	 * input layer plus layers of 2 and 4 neurons, six weight lines must
	 * follow "END": two with &lt;input_size&gt; floats and four with two
	 * floats each.</p>
	 *
	 * @param path path to the configuration file
	 * @throws IOException on I/O failure or when the file does not have the
	 *             expected format
	 */
	// TODO - min 1 layer
	public void readConfFile(String path) throws IOException {
		int lineCount = 1;
		// try-with-resources: the original leaked the stream on any
		// exception path (close() was only reached on success).
		try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(path)))) {
			String line = br.readLine();
			// --- layer declarations: "<size> [activationFunction]" until "END" ---
			while (line != null) {
				String trimmed = line.trim();
				// Skip blank and whitespace-only lines (the original only
				// skipped truly empty lines and crashed on "   ").
				if (trimmed.isEmpty()) {
					line = br.readLine();
					++lineCount;
					continue;
				}
				if (trimmed.contains("END")) {
					break;
				}
				String[] lineArray = trimmed.split("\\s+");
				Layer layer;
				switch (lineArray.length) {
				case 1:
					layer = new Layer(Integer.parseInt(lineArray[0]));
					break;
				case 2:
					layer = new Layer(Integer.parseInt(lineArray[0]), lineArray[1]);
					break;
				default:
					throw new IOException("illegal arguments or too many arguments in line:" + lineCount);
				}
				layerList.add(layer);
				line = br.readLine();
				++lineCount;
			}

			if (line == null) {
				throw new IOException("no \"END\" line");
			}
			// The first declared layer becomes the input layer.
			layerList.add(0, new InputLayer(layerList.remove(0)));

			line = br.readLine();
			++lineCount;
			int layerCounter = 1;
			// --- neuron weights: one line per neuron, optional trailing bias ---
			while (line != null) {
				if (layerCounter > layerList.size()) {
					throw new IOException("too many weights lines, instead of " + layerList.size());
				}
				if (line.trim().isEmpty()) {
					line = br.readLine();
					++lineCount;
					continue;
				}
				int layerSize = layerList.get(layerCounter).size;
				int prevLayerSize = layerList.get(layerCounter - 1).size;
				// One layer's neurons.
				for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
					if (line == null) {
						// The original dereferenced null here (NPE) when the
						// file ended mid-layer; report it as a format error.
						throw new IOException("unexpected end of file in layer " + layerCounter + " weights");
					}
					String[] lineArray = line.trim().split("\\s+");
					if (lineArray.length != prevLayerSize && lineArray.length != prevLayerSize + 1) {
						throw new IOException("number of weights in line " + lineCount + " do not correspond to layer "
								+ (layerCounter - 1) + " size, layer " + layerCounter + " neurons should have "
								+ prevLayerSize + " weights");
					}
					float[] weightsArray = new float[prevLayerSize];
					for (int it = 0; it < prevLayerSize; ++it) {
						weightsArray[it] = Float.parseFloat(lineArray[it]);
					}
					layerList.get(layerCounter).setNeuronWeights(neuronIt, weightsArray);
					if (lineArray.length == prevLayerSize + 1) {
						layerList.get(layerCounter).setNeuronBias(neuronIt, Float.parseFloat(lineArray[prevLayerSize]));
					}
					++lineCount;
					line = br.readLine();
				}
				++layerCounter;
			}
			if (layerCounter != layerList.size()) {
				throw new IOException("too few weights lines " + layerCounter + " instead of " + layerList.size());
			}
		}
	}

	/**
	 * Variant of {@link #readConfFile(String)} that currently parses a
	 * hard-coded configuration string instead of the file at {@code path}
	 * (the file-reading code is stubbed out — see TODO). Additionally
	 * supports "#" comment lines and two special weight-line forms: a
	 * single-token line sets zero weights plus that token as the bias for a
	 * whole layer; a (currently disabled) form initializes weights randomly
	 * within a range.
	 *
	 * @param path path to file — TODO: currently ignored, the embedded
	 *            configuration string is used instead
	 * @throws IOException when the configuration does not have the expected
	 *             format
	 */
	public void readConfFile2(String path) throws IOException {
		int lineCount = 1;
		// TODO: restore reading from 'path' instead of the embedded string.
		// NOTE(review): this embedded config declares three layers ("3",
		// "9 LIN", "1 HARDLIM") while checkLayerCount() below requires
		// exactly 2 — confirm which of the two is the intended contract.
		String input = "3\n"
				+"9 LIN \n"
				+"1 HARDLIM \n"
				+"END\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.57 0.57 0.57\n"
				+"0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5";
		try (BufferedReader br = new BufferedReader(new StringReader(input))) {
			String line = br.readLine();
			// --- layer declarations: "<size> [activationFunction]" until "END" ---
			while (line != null) {
				if (line.startsWith("#")) {
					// Comment line: skipped without counting (as before).
					line = br.readLine();
					continue;
				}
				String trimmed = line.trim();
				if (trimmed.isEmpty()) {
					line = br.readLine();
					++lineCount;
					continue;
				}
				if (trimmed.contains("END")) {
					break;
				}
				String[] lineArray = trimmed.split("\\s+");
				Layer layer;
				switch (lineArray.length) {
				case 1:
					layer = new Layer(Integer.parseInt(lineArray[0]));
					break;
				case 2:
					layer = new Layer(Integer.parseInt(lineArray[0]), lineArray[1]);
					break;
				default:
					throw new IOException("illegal arguments or too many arguments in line:" + lineCount);
				}
				layerList.add(layer);
				line = br.readLine();
				++lineCount;
			}
			if (line == null) {
				throw new IOException("no \"END\" line");
			}
			// The first declared layer becomes the input layer.
			layerList.add(0, new InputLayer(layerList.remove(0)));

			line = br.readLine();
			++lineCount;
			int layerCounter = 1;

			// --- neuron weights ---
			while (line != null) {
				if (line.startsWith("#")) {
					line = br.readLine();
					continue;
				}
				if (layerCounter > layerList.size()) {
					throw new IOException("too many weights lines, instead of " + layerList.size());
				}
				if (line.trim().isEmpty()) {
					line = br.readLine();
					++lineCount;
					continue;
				}
				int layerSize = layerList.get(layerCounter).size;
				int prevLayerSize = layerList.get(layerCounter - 1).size;
				int tokenCount = line.trim().split("\\s+").length;

				if (tokenCount == 1) {
					// Single-token form: every neuron of this layer gets zero
					// weights and that token as its bias.
					for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
						if (line == null) {
							throw new IOException("unexpected end of input in layer " + layerCounter + " weights");
						}
						String[] lineArray = line.trim().split("\\s+");
						float[] weights = new float[prevLayerSize];
						layerList.get(layerCounter).setNeuronWeights(neuronIt, weights);
						layerList.get(layerCounter).setNeuronBias(neuronIt, Float.parseFloat(lineArray[0]));
						line = br.readLine();
						++lineCount;
					}
				} else if (tokenCount == 666) {
					// NOTE(review): effectively dead code — the 666 sentinel
					// disables this random-initialization branch ("bias min
					// max" form). Preserved unchanged; confirm the intended
					// token count before enabling.
					for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
						String[] lineArray = line.trim().split("\\s+");
						float[] weights = new float[prevLayerSize];
						float[] args = new float[2];
						for (int it = 1; it < lineArray.length; it++) {
							args[it - 1] = Float.parseFloat(lineArray[it]);
						}
						float min = Math.min(args[0], args[1]);
						float max = Math.max(args[0], args[1]);
						for (int it = 0; it < prevLayerSize; ++it) {
							weights[it] = (float) (min + (Math.random() * (max - min)));
						}
						layerList.get(layerCounter).setNeuronWeights(neuronIt, weights);
						layerList.get(layerCounter).setNeuronBias(neuronIt, Float.parseFloat(lineArray[0]));
						line = br.readLine();
						++lineCount;
					}
				} else {
					// Explicit-weights form: one line per neuron, optional
					// trailing bias.
					for (int neuronIt = 0; neuronIt < layerSize; ++neuronIt) {
						if (line == null) {
							throw new IOException("unexpected end of input in layer " + layerCounter + " weights");
						}
						String[] lineArray = line.trim().split("\\s+");
						if (lineArray.length != prevLayerSize && lineArray.length != prevLayerSize + 1) {
							throw new IOException("number of weights in line "
									+ lineCount + " do not correspond to layer "
									+ (layerCounter - 1) + " size, layer "
									+ layerCounter + " neurons should have "
									+ prevLayerSize + " weights");
						}
						float[] weightsArray = new float[prevLayerSize];
						for (int it = 0; it < prevLayerSize; ++it) {
							weightsArray[it] = Float.parseFloat(lineArray[it]);
						}
						layerList.get(layerCounter).setNeuronWeights(neuronIt, weightsArray);
						if (lineArray.length == prevLayerSize + 1) {
							layerList.get(layerCounter).setNeuronBias(neuronIt,
									Float.parseFloat(lineArray[prevLayerSize]));
						}
						++lineCount;
						line = br.readLine();
					}
				}
				++layerCounter;
			}
			if (layerCounter != layerList.size()) {
				throw new IOException("too few weights lines " + layerCounter + " instead of " + layerList.size());
			}
		}
		checkLayerCount();
	}

	/**
	 * Ensures the network has exactly two layers (input + map), the shape a
	 * Kohonen network expects.
	 *
	 * @throws RuntimeException if the layer count differs from 2
	 */
	protected void checkLayerCount() {
		if (layerList.size() != 2) {
			throw new RuntimeException("number of layers is not equal 2");
		}
	}
}
