package de.distMLP.train;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hama.bsp.BSPPeer;
import org.apache.hama.bsp.sync.SyncException;

import de.distMLP.data.TrainingExample;
import de.distMLP.messages.HamaMessage;
import de.distMLP.messages.WeightMatrixMessage;
import de.mlp_distributed.mlp.math.mahout.Matrix;

/**
 * Distributed mini-batch gradient descent trainer for an MLP, running as an
 * Apache Hama BSP task. Each peer computes gradient sums over its local
 * mini-batch, the gradients are aggregated on the master peer, the master
 * computes the weight update and broadcasts the new weight matrices back to
 * all peers, which then apply them to their local MLP copy.
 *
 * Per-phase wall-clock timings are collected (min / max / running sum); the
 * first iteration is excluded from all measurements as warm-up. Only the
 * master task writes the aggregated measures at the end of training.
 */
public class MLP_MiniBatchGD extends Base_MLP_Trainer {

	private static final Log LOG = LogFactory.getLog(MLP_MiniBatchGD.class);

	// Timing accumulators, all in milliseconds. The "average*" fields hold a
	// running SUM; the average is only computed in writeMeasures(). Mins are
	// seeded with Integer.MAX_VALUE (~24.8 days) — sufficient here, since no
	// single phase is expected to exceed that.
	private long averageCalculateGradientTime = 0;
	private long maxCalculateGradientTime = 0;
	private long minCalculateGradientTime = Integer.MAX_VALUE;

	// NOTE(review): these three are final and never updated anywhere in this
	// class, so writeMeasures() always reports "2147483647 | 0 | 0" for this
	// phase — presumably a leftover from a removed measurement; confirm.
	private final long averageSendWeightAverageOnePeer = 0;
	private final long maxSendWeightAverageOnePeer = 0;
	private final long minSendWeightAverageOnePeer = Integer.MAX_VALUE;

	private long averageDistributeWeightAverageToAllPeers = 0;
	private long maxDistributeWeightAverageToAllPeers = 0;
	private long minDistributeWeightAverageToAllPeers = Integer.MAX_VALUE;

	private long averageCalculateAverageWeightMaster = 0;
	private long maxCalculateAverageWeightMaster = 0;
	private long minCalculateAverageWeightMaster = Integer.MAX_VALUE;

	private long averageSendWeightUpdate = 0;
	private long maxSendWeightUpdate = 0;
	private long minSendWeightUpdate = Integer.MAX_VALUE;

	private long maxApplyNewWeightToMatrix = 0;
	private long minApplyNewWeightToMatrix = Integer.MAX_VALUE;
	private long averageApplyNewWeightToMatrix = 0;

	private long maxIterationDuration = 0;
	private long minIterationDuration = Integer.MAX_VALUE;
	private long averageIterationDuration = 0;

	// All individual iteration durations, kept for the standard deviation.
	private final List<Long> iterationDurationMeasuresList = new ArrayList<Long>();

	/**
	 * Main BSP training loop.
	 *
	 * Per outer iteration: repeatedly (1) compute the local gradient sum over
	 * one mini-batch, (2) aggregate all gradient sums on the master, (3) let
	 * the master compute the weight update and broadcast it, (4) apply the
	 * received weights locally — until the master signals that the input was
	 * fully consumed (stop-training flag). Training ends after
	 * {@code nbIterations} iterations or earlier when the target error is
	 * reached (the master then broadcasts a stop message).
	 *
	 * @param peer the BSP peer this task runs on
	 */
	@Override
	public void bsp(final BSPPeer<LongWritable, Text, NullWritable, Text, HamaMessage> peer) throws IOException, SyncException,
			InterruptedException {

		final int totalBatchSize = Base_MLP_Trainer.getTotalBatchSize(peer);

		if (this.useCache) {
			// Read the whole input once so the example cache gets populated.
			final TrainingExample trainingExample = new TrainingExample();
			while (this.readNextTrainingExample(peer, trainingExample)) {
				// load cache
			}
			this.reopenInput(peer);
		}

		for (int iterationNumber = 0; iterationNumber < this.nbIterations; iterationNumber++) {
			// A pending message at this point is the master's "stop training"
			// broadcast (sent when the target error was reached).
			if (peer.getNumCurrentMessages() > 0) {
				break;
			}

			final long startIteration = System.currentTimeMillis();
			this.reopenInput(peer);
			while (true) {
				final long startCalculateGradient = System.currentTimeMillis();
				final WeightMatrixMessage gradientSumMsg = this.calculateGradient(peer, totalBatchSize);
				this.measureGradientCalculation(startCalculateGradient, iterationNumber);

				// Aggregate all peers' gradient sums on the master peer.
				final WeightMatrixMessage averageSumMsg = this.sendAverageWeightsToMaster(gradientSumMsg, iterationNumber);

				/**
				 * Merges the gradients sent by all clients, calculates the
				 * average and the weight updates. Sends the new weights to all
				 * clients.
				 */
				final long startCalculateAverageWeightMaster = System.currentTimeMillis();
				WeightMatrixMessage msg = null;
				if (peer.getPeerName().equals(this.masterTask)) {
					msg = this.calculateWeightUpdate(peer, averageSumMsg);
				}

				this.measureCalculateAverageWeightMaster(startCalculateAverageWeightMaster, iterationNumber);

				msg = this.sendWeightUpdateToAllPeers(peer, msg, iterationNumber);

				/**
				 * Applies the new weight matrices sent by the master to the
				 * local MLP.
				 */
				final long startApplyNewWeightToMatrix = System.currentTimeMillis();

				if (peer.getNumCurrentMessages() > 0) {
					msg = (WeightMatrixMessage) peer.getCurrentMessage();
				}
				if (msg == null) {
					// BUGFIX: the original logged this condition but then
					// dereferenced msg anyway (NPE on msg.isStopTraining()).
					// Without a weight update this peer cannot continue the
					// batch loop, so abort the inner loop instead.
					MLP_MiniBatchGD.LOG.error("Peer: " + peer.getPeerName() + " (" + this.peerToIntMap.get(peer.getPeerName()) + ")"
							+ " no weight update found!");
					break;
				}
				this.mlp.initWeights(msg.getMatrix());

				this.measureApplyNewWeightToMatrix(startApplyNewWeightToMatrix, iterationNumber);

				if (this.mockupTest) {
					final String logText = "Matrix(0,0): " + this.mlp.getWeights()[0].get(0, 0);
					MLP_MiniBatchGD.LOG.info(logText);
					peer.write(NullWritable.get(), new Text(logText));
				}

				if (msg.isStopTraining()) {
					/**
					 * Only set when training iterated over all training
					 * examples: calculate error and start a new iteration.
					 */
					if (!this.batchLearning && this.calculateErrorOnEachIteration) {
						/**
						 * Online learning changed the weight matrices after
						 * each training set. Average error is now calculated
						 * over all training examples.
						 */
						this.calculateAndWriteCost(peer, this.masterTask, this.mlp, totalBatchSize);
					}
					break;
				}
			}
			this.measureIterationDuration(startIteration, iterationNumber);
			if (this.targetError > 0) {
				// Early stop: the master broadcasts a stop message once the
				// error drops below the target; every peer syncs so the
				// message is visible at the top of the next iteration.
				if (peer.getPeerName().equals(this.masterTask) && (this.error < this.targetError)) {
					for (final String peerName : peer.getAllPeerNames()) {
						peer.send(peerName, new WeightMatrixMessage(null, true, 0));
					}

				}
				peer.sync();
			}
		}
		if (!this.calculateErrorOnEachIteration) {
			this.calculateAndWriteCost(peer, this.masterTask, this.mlp, totalBatchSize);
		}
		this.writeMeasures(peer);
	}

	/**
	 * Sends this peer's gradient sum to the master, which aggregates the
	 * messages of all peers. Returns the aggregated message (only meaningful
	 * on the master; see the message distributor's contract).
	 *
	 * @param gradientSumMsg  this peer's local gradient sum
	 * @param iterationNumber current iteration (iteration 0 is not measured)
	 */
	public WeightMatrixMessage sendAverageWeightsToMaster(final WeightMatrixMessage gradientSumMsg, final int iterationNumber)
			throws IOException, SyncException, InterruptedException {
		final long startDistributeWeightAverageToAllPeers = System.currentTimeMillis();

		final WeightMatrixMessage result = (WeightMatrixMessage) this.messageDistributor.distributeAndAggegateMessageToMasterPeer(
				gradientSumMsg, this.masterTask);

		this.measureDistributeWeightAverageToAllPeers(startDistributeWeightAverageToAllPeers, iterationNumber);
		return result;
	}

	/**
	 * Broadcasts the weight update from the master to all peers and returns
	 * the message received by this peer.
	 *
	 * @param msg1 the master's weight-update message (null on non-master peers)
	 */
	private WeightMatrixMessage sendWeightUpdateToAllPeers(final BSPPeer<LongWritable, Text, NullWritable, Text, HamaMessage> peer,
			final WeightMatrixMessage msg1, final int iterationNumber) throws IOException, SyncException, InterruptedException {
		final long startSendWeightUpdate = System.currentTimeMillis();

		final HamaMessage result = this.messageDistributor.distributeMessageToAllPeers(this.masterTask, msg1, peer.getAllPeerNames());

		this.measureSendWeightUpdate(startSendWeightUpdate, iterationNumber);

		return (WeightMatrixMessage) result;
	}

	/**
	 * Writes the collected timing statistics ("min | max | average" in ms) to
	 * the peer output. Only the master task writes.
	 */
	private void writeMeasures(final BSPPeer<LongWritable, Text, NullWritable, Text, HamaMessage> peer) throws IOException {
		if (!peer.getPeerName().equals(this.masterTask)) {
			return;
		}
		// The first iteration is not measured. BUGFIX: guard against division
		// by zero when only a single iteration was configured.
		final long iterationCount1 = Math.max(1, this.nbIterations - 1);
		peer.write(NullWritable.get(), new Text("CalculateGradientTime: " + this.minCalculateGradientTime + " | "
				+ this.maxCalculateGradientTime + " | " + (this.averageCalculateGradientTime / iterationCount1)));

		peer.write(NullWritable.get(), new Text("SendWeightAverageOnePeer: " + this.minSendWeightAverageOnePeer + " | "
				+ this.maxSendWeightAverageOnePeer + " | " + (this.averageSendWeightAverageOnePeer / iterationCount1)));

		peer.write(NullWritable.get(), new Text("DistributeWeightAverageToAllPeers: " + this.minDistributeWeightAverageToAllPeers + " | "
				+ this.maxDistributeWeightAverageToAllPeers + " | " + (this.averageDistributeWeightAverageToAllPeers / iterationCount1)));

		peer.write(NullWritable.get(), new Text("CalculateAverageWeightMaster: " + this.minCalculateAverageWeightMaster + " | "
				+ this.maxCalculateAverageWeightMaster + " | " + (this.averageCalculateAverageWeightMaster / iterationCount1)));

		peer.write(NullWritable.get(), new Text("SendWeightUpdate: " + this.minSendWeightUpdate + " | " + this.maxSendWeightUpdate + " | "
				+ (this.averageSendWeightUpdate / iterationCount1)));

		// BUGFIX: this measure was written with the copy-pasted label
		// "SendWeightUpdate", duplicating the previous line's label.
		peer.write(NullWritable.get(), new Text("ApplyNewWeightToMatrix: " + this.minApplyNewWeightToMatrix + " | "
				+ this.maxApplyNewWeightToMatrix + " | " + (this.averageApplyNewWeightToMatrix / iterationCount1)));

		final long average = this.averageIterationDuration / iterationCount1;
		final long standardDeviation = this.computeStandardDeviation(average);
		// Standard error of the mean ("unsecurity" kept in the output for
		// compatibility with existing log consumers).
		final double unsecurity = standardDeviation / Math.sqrt(this.iterationDurationMeasuresList.size());

		peer.write(NullWritable.get(), new Text("IterationDuration: " + this.minIterationDuration + " | " + this.maxIterationDuration
				+ " | Average: " + (this.averageIterationDuration / iterationCount1) + " | standardDeviation: " + standardDeviation
				+ " | unsecurity: " + unsecurity));

		peer.write(NullWritable.get(), new Text("CalculateCost: " + this.minCalculateCost + " | " + this.maxCalculateCost + " | "
				+ (this.averageCalculateCost / iterationCount1)));
	}

	/**
	 * Sample standard deviation (Bessel's correction, n - 1) of the recorded
	 * iteration durations around the given average. Note: each squared term
	 * is truncated to a long when accumulated.
	 *
	 * @param average the mean iteration duration in ms
	 * @return the standard deviation in ms, 0 if nothing was measured
	 */
	private long computeStandardDeviation(final long average) {
		if (this.iterationDurationMeasuresList.size() == 0) {
			return 0;
		}
		long sum = 0;
		for (int i = 0; i < this.iterationDurationMeasuresList.size(); i++) {
			sum += Math.pow(this.iterationDurationMeasuresList.get(i).longValue() - average, 2);
		}
		if (this.iterationDurationMeasuresList.size() > 1) {
			sum = (sum) / (this.iterationDurationMeasuresList.size() - 1);
		}
		return (long) Math.sqrt(sum);
	}

	/** Records the gradient-aggregation duration; iteration 0 is skipped. */
	private void measureDistributeWeightAverageToAllPeers(final long startDistributeWeightAverageToAllPeers, final int iterationNumber) {
		if (iterationNumber == 0) {
			return;
		}

		final long distributeWeightAverageToAllPeers = System.currentTimeMillis() - startDistributeWeightAverageToAllPeers;
		if (distributeWeightAverageToAllPeers > this.maxDistributeWeightAverageToAllPeers) {
			this.maxDistributeWeightAverageToAllPeers = distributeWeightAverageToAllPeers;
		}
		if (distributeWeightAverageToAllPeers < this.minDistributeWeightAverageToAllPeers) {
			this.minDistributeWeightAverageToAllPeers = distributeWeightAverageToAllPeers;
		}
		this.averageDistributeWeightAverageToAllPeers += distributeWeightAverageToAllPeers;
	}

	/** Records the gradient-calculation duration; iteration 0 is skipped. */
	private void measureGradientCalculation(final long startCalculateGradient, final int iterationNumber) {
		if (iterationNumber == 0) {
			return;
		}

		final long calculateGradientDuration = System.currentTimeMillis() - startCalculateGradient;
		if (calculateGradientDuration > this.maxCalculateGradientTime) {
			this.maxCalculateGradientTime = calculateGradientDuration;
		}
		if (calculateGradientDuration < this.minCalculateGradientTime) {
			this.minCalculateGradientTime = calculateGradientDuration;
		}
		this.averageCalculateGradientTime += calculateGradientDuration;
	}

	/**
	 * Records the whole-iteration duration (also kept individually for the
	 * standard deviation); iteration 0 is skipped.
	 */
	private void measureIterationDuration(final long startIteration, final int iterationNumber) {
		if (iterationNumber == 0) {
			return;
		}
		// Long.valueOf instead of the deprecated new Long(...) constructor.
		final long iteration = System.currentTimeMillis() - startIteration;
		this.iterationDurationMeasuresList.add(Long.valueOf(iteration));
		if (iteration > this.maxIterationDuration) {
			this.maxIterationDuration = iteration;
		}
		if (iteration < this.minIterationDuration) {
			this.minIterationDuration = iteration;
		}
		this.averageIterationDuration += iteration;
	}

	/** Records the master's weight-update computation duration; iteration 0 is skipped. */
	private void measureCalculateAverageWeightMaster(final long startCalculateAverageWeightMaster, final int iterationNumber) {
		if (iterationNumber == 0) {
			return;
		}
		final long calculateAverageWeightMaster = System.currentTimeMillis() - startCalculateAverageWeightMaster;
		if (calculateAverageWeightMaster > this.maxCalculateAverageWeightMaster) {
			this.maxCalculateAverageWeightMaster = calculateAverageWeightMaster;
		}
		if (calculateAverageWeightMaster < this.minCalculateAverageWeightMaster) {
			this.minCalculateAverageWeightMaster = calculateAverageWeightMaster;
		}
		this.averageCalculateAverageWeightMaster += calculateAverageWeightMaster;
	}

	/** Records the weight-update broadcast duration; iteration 0 is skipped. */
	private void measureSendWeightUpdate(final long startSendWeightUpdate, final int iterationNumber) {
		if (iterationNumber == 0) {
			return;
		}
		final long sendWeightUpdate = System.currentTimeMillis() - startSendWeightUpdate;
		if (sendWeightUpdate > this.maxSendWeightUpdate) {
			this.maxSendWeightUpdate = sendWeightUpdate;
		}
		if (sendWeightUpdate < this.minSendWeightUpdate) {
			this.minSendWeightUpdate = sendWeightUpdate;
		}
		this.averageSendWeightUpdate += sendWeightUpdate;
	}

	/** Records the duration of applying the received weights; iteration 0 is skipped. */
	private void measureApplyNewWeightToMatrix(final long startApplyNewWeightToMatrix, final int iterationNumber) {
		if (iterationNumber == 0) {
			return;
		}
		final long applyNewWeightToMatrix = System.currentTimeMillis() - startApplyNewWeightToMatrix;
		if (applyNewWeightToMatrix > this.maxApplyNewWeightToMatrix) {
			this.maxApplyNewWeightToMatrix = applyNewWeightToMatrix;
		}
		if (applyNewWeightToMatrix < this.minApplyNewWeightToMatrix) {
			this.minApplyNewWeightToMatrix = applyNewWeightToMatrix;
		}
		this.averageApplyNewWeightToMatrix += applyNewWeightToMatrix;
	}

	/**
	 * Computes the gradient sum over up to {@code batchSize} training
	 * examples read from the local input ({@code batchSize == 0} means
	 * "consume all remaining examples"). Optionally accumulates the cost when
	 * batch learning with per-iteration error calculation is enabled.
	 *
	 * @param peer           the BSP peer providing the input
	 * @param totalBatchSize total number of examples across all peers, used
	 *                       to normalize the gradient and the average error
	 * @return a message carrying the (normalized) gradient sum, the
	 *         stop-training flag (set when the input was exhausted) and the
	 *         average error (0 when no costs were accumulated)
	 */
	private WeightMatrixMessage calculateGradient(final BSPPeer<LongWritable, Text, NullWritable, Text, HamaMessage> peer,
			final int totalBatchSize) throws IOException {
		final Matrix[] gradients = new Matrix[this.mlp.getWeights().length];
		boolean stopTraining = false;
		int numberOfItems = 0;

		final TrainingExample trainingExample = new TrainingExample();

		// total sum of errors
		double costSum = 0;
		// number of errors added to the sum
		int nbCosts = 0;
		// examples whose cost was NaN/Infinite and therefore skipped
		int errors = 0;

		while ((numberOfItems < this.batchSize) || (this.batchSize == 0)) {
			if (this.readNextTrainingExample(peer, trainingExample)) {
				numberOfItems++;
				final Matrix[] deltaWs = Training_Helper.calculateGradient(trainingExample, this.mlp);
				Training_Helper.addWeightChanges(gradients, deltaWs);

				if (this.batchLearning && this.calculateErrorOnEachIteration) {
					// Accumulate the cost, skipping non-finite values.
					final double tmpCost = Base_MLP_Trainer.calculateError(trainingExample, this.mlp);
					if (!Double.isInfinite(tmpCost) && !Double.isNaN(tmpCost)) {
						costSum += tmpCost;
						nbCosts++;
					} else {
						errors++;
					}
				}
			} else {
				// Input exhausted: signal the end of this iteration.
				stopTraining = true;
				break;
			}

		}
		if (peer.getPeerName().equals(this.masterTask) && this.batchLearning) {
			if (this.calculateErrorOnEachIteration) {
				MLP_MiniBatchGD.LOG.info("Cost calculated on " + nbCosts + " items. Errors " + errors);
				peer.write(NullWritable.get(), new Text("Cost calculated on " + nbCosts + " items. Errors " + errors));
			}
		}

		if (numberOfItems > 0) {
			// Normalize: full-batch learning divides by the global example
			// count, mini-batch by the global batch size.
			if (this.batchLearning) {
				Training_Helper.divideMatrixByDouble(gradients, totalBatchSize);
			} else {
				Training_Helper.divideMatrixByDouble(gradients, this.batchSize * peer.getNumPeers());
			}
		}
		final WeightMatrixMessage msg;
		if (numberOfItems > 0) {
			double averageError = 0;
			if (nbCosts > 0) {
				averageError = costSum / totalBatchSize;
			}
			msg = new WeightMatrixMessage(gradients, stopTraining, averageError);
		} else {
			msg = new WeightMatrixMessage(null, stopTraining, 0);
		}
		return msg;
	}
}
