/**
 * 
 */
package com.yullage.ann.network.som;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.yullage.ann.common.Util;
import com.yullage.ann.core.BiasNeuron;
import com.yullage.ann.core.Connection;
import com.yullage.ann.core.Layer;
import com.yullage.ann.core.NeuralNetwork;
import com.yullage.ann.core.Neuron;
import com.yullage.ann.data.DataSetType;
import com.yullage.ann.data.SomDataManager;

/**
 * @author Yu-chun Huang
 * 
 */
public class SomNetwork implements NeuralNetwork {

	private static final Logger logger = LoggerFactory.getLogger(SomNetwork.class);

	// Network layers; index 0 is the input layer, the last index is the output layer.
	private List<Layer> layers;

	// Hyper-parameters: layer structure, attraction/repulsion forces, epoch limit, file names.
	private SomParameters params;
	// Supplies training vectors plus the intra-/inter-class index pairs.
	private SomDataManager dataManager;

	/**
	 * Creates a SOM network driven by the given parameters.
	 * {@link #initialize()} must be called before {@link #train()} or
	 * {@link #getOutputVector(List)}.
	 *
	 * @param networkParameters the network configuration to use
	 */
	public SomNetwork(SomParameters networkParameters) {
		this.params = networkParameters;
	}

	/**
	 * Builds the layered network described by
	 * {@code params.getNetworkStructure()}, randomizes every connection
	 * weight, and loads the training data set with its similarity matrix.
	 *
	 * @throws IllegalArgumentException if the configured structure defines
	 *             fewer than two layers
	 */
	@Override
	public void initialize() {
		logger.info("[BEGIN] Initializing SOM neural network.");
		logger.info("{}", params);

		// Construct network
		logger.info("Constructing network...");
		List<Integer> structure = params.getNetworkStructure();

		if (structure.size() <= 1) {
			throw new IllegalArgumentException(
					"Network structure must define at least two layers, but has " + structure.size() + ".");
		}

		layers = new ArrayList<Layer>();

		// Input layer (constructed without a bias connection).
		layers.add(new Layer(structure.get(0), false));

		// All other layers (each constructed with a bias connection).
		for (int i = 1; i < structure.size(); i++) {
			layers.add(new Layer(structure.get(i), true));
		}

		// Wire every layer to its predecessor.
		for (int i = structure.size() - 1; i > 0; i--) {
			layers.get(i).addInLayer(layers.get(i - 1));
		}

		// Assign a random initial weight to every incoming connection.
		logger.info("Initializing random weights...");
		for (int i = 1; i < structure.size(); i++) {
			for (Neuron neuron : layers.get(i).getNeurons()) {
				for (Connection conn : neuron.getAllInConnections()) {
					conn.setWeight(Util.getRandom());
				}
			}
		}

		// Load data set
		logger.info("Loading data points and similarity matrix...");
		dataManager = new SomDataManager();
		dataManager.loadData(DataSetType.TRAINING, params.getTrainingDataFileName());
		dataManager.loadSimilarityMatrix(DataSetType.TRAINING, params.getTrainingSimilarityFileName());
		logger.info("Data set size: {}; data dimension: {}", dataManager.getDataSize(DataSetType.TRAINING),
				dataManager.getDataDimension());

		logger.info("[ END ] Initializing SOM neural network.");
	}

	/**
	 * Trains every non-input layer in order, running up to
	 * {@code params.getMaxEpochsPerLayer()} epochs per layer.
	 *
	 * @return the total number of epochs executed across all layers
	 */
	@Override
	public long train() {
		long totalEpochs = 0;

		logger.info("[BEGIN] Training SOM neural network.");
		for (int i = 1; i < layers.size(); i++) {
			logger.info("Training layer {}...", i);
			totalEpochs += trainLayer(i, params.getMaxEpochsPerLayer());
		}

		logger.info("[ END ] Training SOM neural network. (Total epochs: {})", totalEpochs);

		return totalEpochs;
	}

	/**
	 * Feeds {@code input} through the whole network and returns the output
	 * vector of the final layer.
	 *
	 * @param input input values, one per input-layer neuron
	 * @return the output vector of the last layer
	 */
	@Override
	public List<Double> getOutputVector(List<Double> input) {
		int lastLayer = layers.size() - 1;
		activate(lastLayer, input);
		return layers.get(lastLayer).getOutputVector();
	}

	/**
	 * Runs up to {@code maxEpochs} training epochs on one layer. Each epoch
	 * finds the intra-class pair whose outputs are currently farthest apart
	 * and the inter-class pair whose outputs are currently closest, then
	 * nudges the layer's weights to attract the former and repel the latter.
	 *
	 * @param layerIndex index of the layer to train (must be &gt;= 1)
	 * @param maxEpochs maximum number of epochs to run
	 * @return the number of epochs actually executed
	 * @throws IllegalStateException if the data manager yields no intra-class
	 *             or no inter-class pairs
	 */
	private int trainLayer(int layerIndex, int maxEpochs) {
		int curEpoch = 0;

		List<List<Integer>> intraClassPair = dataManager.getIntraClassPairs(DataSetType.TRAINING);
		List<List<Integer>> interClassPair = dataManager.getInterClassPairs(DataSetType.TRAINING);

		int intraPairCount = intraClassPair.size();
		int interPairCount = interClassPair.size();

		// Fail fast with a clear message instead of an obscure
		// IndexOutOfBoundsException from pair.get(0) below.
		if (intraPairCount == 0 || interPairCount == 0) {
			throw new IllegalStateException("Training requires at least one intra-class and one inter-class pair"
					+ " (intra: " + intraPairCount + ", inter: " + interPairCount + ").");
		}

		for (int e = 0; e < maxEpochs; e++) {
			double maxIntraDistance = Double.NEGATIVE_INFINITY;
			double minInterDistance = Double.POSITIVE_INFINITY;

			int maxIntraIndex = 0;
			int minInterIndex = 0;

			// Find the index of the pair with maximum intra-class distance.
			for (int i = 0; i < intraPairCount; i++) {
				int index1 = intraClassPair.get(i).get(0);
				int index2 = intraClassPair.get(i).get(1);

				double distance = calcDistance(layerIndex, index1, index2);
				if (distance > maxIntraDistance) {
					maxIntraDistance = distance;
					maxIntraIndex = i;
				}
			}
			if (e % 1000 == 0) {
				logger.debug("Intra Idx: {}; Dst: {}", maxIntraIndex, maxIntraDistance);
			}

			// Find the index of the pair with minimum inter-class distance.
			for (int i = 0; i < interPairCount; i++) {
				int index1 = interClassPair.get(i).get(0);
				int index2 = interClassPair.get(i).get(1);

				double distance = calcDistance(layerIndex, index1, index2);
				if (distance < minInterDistance) {
					minInterDistance = distance;
					minInterIndex = i;
				}
			}
			if (e % 1000 == 0) {
				logger.debug("                                         Inter Idx: {}; Dst: {}", minInterIndex,
						minInterDistance);
			}

			// Attraction gradient: pull the farthest intra-class pair together.
			int pIdx = intraClassPair.get(maxIntraIndex).get(0);
			int qIdx = intraClassPair.get(maxIntraIndex).get(1);
			List<List<Double>> attGradient = calcGradient(layerIndex, pIdx, qIdx);

			// Repulsion gradient: push the closest inter-class pair apart.
			pIdx = interClassPair.get(minInterIndex).get(0);
			qIdx = interClassPair.get(minInterIndex).get(1);
			List<List<Double>> repGradient = calcGradient(layerIndex, pIdx, qIdx);

			adjustWeights(layerIndex, attGradient, repGradient);

			curEpoch++;
		}

		return curEpoch;
	}

	/**
	 * Applies one gradient step to every incoming connection of the given
	 * layer: descend along the attraction gradient (to shrink intra-class
	 * distance) and ascend along the repulsion gradient (to grow inter-class
	 * distance).
	 *
	 * @param layerIndex index of the layer whose weights are updated
	 * @param attGradient attraction gradient, one row per neuron; each row has
	 *            one entry per predecessor neuron plus a trailing bias entry
	 * @param repGradient repulsion gradient with the same shape
	 */
	private void adjustWeights(int layerIndex, List<List<Double>> attGradient, List<List<Double>> repGradient) {
		List<Neuron> neuThis = layers.get(layerIndex).getNeurons();
		List<Neuron> neuPre = layers.get(layerIndex - 1).getNeurons();
		int nPre = neuPre.size();

		for (int i = 0; i < neuThis.size(); i++) {
			// Regular connections from the previous layer.
			for (int j = 0; j < nPre; j++) {
				Connection conn = neuThis.get(i).getInConnection(neuPre.get(j).id);
				double w = conn.getWeight();
				w -= params.getAttractionForce() * attGradient.get(i).get(j);
				w += params.getRepulsionForce() * repGradient.get(i).get(j);
				conn.setWeight(w);
			}

			// Bias connection; its gradient is the trailing element of each row.
			Connection biasConn = neuThis.get(i).getInConnection(BiasNeuron.getInstance().id);
			double w = biasConn.getWeight();
			w -= params.getAttractionForce() * attGradient.get(i).get(nPre);
			w += params.getRepulsionForce() * repGradient.get(i).get(nPre);
			biasConn.setWeight(w);
		}
	}

	/**
	 * Computes the gradient of the squared output-distance between the data
	 * points at indices {@code pIdx} and {@code qIdx} with respect to the
	 * incoming weights of the given layer.
	 *
	 * @param layerIndex layer whose incoming weights the gradient refers to
	 * @param pIdx index of the first training data point
	 * @param qIdx index of the second training data point
	 * @return one row per neuron of the layer; each row holds one value per
	 *         predecessor neuron followed by a final bias term
	 */
	private List<List<Double>> calcGradient(int layerIndex, int pIdx, int qIdx) {
		List<Double> pInput = dataManager.getDataByIndex(DataSetType.TRAINING, pIdx);
		List<Double> qInput = dataManager.getDataByIndex(DataSetType.TRAINING, qIdx);

		// Forward-propagate both patterns, capturing this layer's and the
		// previous layer's outputs for each.
		// NOTE(review): assumes getOutputVector() returns a snapshot, not a
		// live view — otherwise the second activate() would clobber pThis/pPre.
		activate(layerIndex, pInput);
		List<Double> pThis = layers.get(layerIndex).getOutputVector();
		List<Double> pPre = layers.get(layerIndex - 1).getOutputVector();

		activate(layerIndex, qInput);
		List<Double> qThis = layers.get(layerIndex).getOutputVector();
		List<Double> qPre = layers.get(layerIndex - 1).getOutputVector();

		int nThis = pThis.size();
		int nPre = pPre.size();

		List<List<Double>> gradient = new ArrayList<List<Double>>();
		for (int i = 0; i < nThis; i++) {
			List<Double> row = new ArrayList<Double>(nPre + 1);

			// The (1 - out^2) factors match the derivative of tanh;
			// presumably the neurons use a tanh activation — confirm in
			// Neuron.calculateOutput().
			double a = (pThis.get(i) - qThis.get(i)) * (1 - Math.pow(pThis.get(i), 2));
			double b = (pThis.get(i) - qThis.get(i)) * (1 - Math.pow(qThis.get(i), 2));
			for (int j = 0; j < nPre; j++) {
				row.add(a * pPre.get(j) - b * qPre.get(j));
			}
			// Bias term. NOTE(review): with a constant bias output of +1 the
			// analogous expression would be (a - b); confirm the intended sign
			// against BiasNeuron's output value.
			row.add(b - a);

			gradient.add(row);
		}

		return gradient;
	}

	/**
	 * Forward-propagates {@code input} from the input layer up to and
	 * including {@code stopLayerIndex}. Assumes {@code input} has at least as
	 * many elements as the input layer has neurons.
	 *
	 * @param stopLayerIndex index of the last layer to activate
	 * @param input input values, one per input-layer neuron
	 */
	private void activate(int stopLayerIndex, List<Double> input) {
		List<Neuron> inputNeurons = layers.get(0).getNeurons();
		for (int i = 0; i < inputNeurons.size(); i++) {
			inputNeurons.get(i).setOutput(input.get(i));
		}

		for (int i = 1; i <= stopLayerIndex; i++) {
			for (Neuron neuron : layers.get(i).getNeurons()) {
				neuron.calculateOutput();
			}
		}
	}

	/**
	 * Computes the squared Euclidean distance between the outputs of the given
	 * layer for the two training data points at {@code index1} and
	 * {@code index2}.
	 *
	 * @param layerIndex layer whose output vectors are compared
	 * @param index1 index of the first training data point
	 * @param index2 index of the second training data point
	 * @return the squared Euclidean distance between the two output vectors
	 */
	private double calcDistance(int layerIndex, int index1, int index2) {
		// Get input data
		List<Double> data1 = dataManager.getDataByIndex(DataSetType.TRAINING, index1);
		List<Double> data2 = dataManager.getDataByIndex(DataSetType.TRAINING, index2);

		// Get the output vector for the first input
		activate(layerIndex, data1);
		List<Double> output1 = layers.get(layerIndex).getOutputVector();

		// Get the output vector for the second input
		activate(layerIndex, data2);
		List<Double> output2 = layers.get(layerIndex).getOutputVector();

		// Calculate squared Euclidean distance
		return Util.calcSquaredEuclideanDistance(output1, output2);
	}

}
