package lv.bond.science.nnstudio.core;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;

import lv.bond.ann.mlp.IActFunction;
import lv.bond.ann.mlp.IErrorCalculator;
import lv.bond.ann.mlp.MlpNet;
import lv.bond.ann.mlp.MlpNetFactory;
import lv.bond.ann.mlp.MlpNeuron;
import lv.bond.ann.mlp.MlpParamsContainer;
import lv.bond.ann.mlp.TargetValuesContainer;
import lv.bond.ann.mlp.impl.MlpNetFactoryImpl;
import lv.bond.ann.net.model.Layer;
import lv.bond.ann.net.model.NetFactory;
import lv.bond.ann.net.model.impl.NetFactoryImpl;
import lv.bond.science.nnstudio.model.ImagesBean;
import lv.bond.science.nnstudio.ui.errorgraph.ErrorGraphView;
import lv.bond.science.nnstudio.ui.testgraph.TestGraphView;
import lv.bond.science.nnstudio.ui.testresult.TestResultView;

public class MlpModel {

	/** Number of per-epoch errors batched together before being pushed to the error graph. */
	private static final int ERROR_BUFFER_SIZE = 5;

	private final MlpNetFactory mlpFactory = new MlpNetFactoryImpl();
	private final NetFactory netFactory = new NetFactoryImpl();
	private final MlpNet mlpNet = mlpFactory.createMlpNet();
	// Becomes true once createMlpNet(NetworkParams) has built the topology.
	private boolean isInitalized = false;
	private int epochsCount = 0;
	// Total network error of every completed training epoch, in order.
	private final ArrayList<Double> epochsErrors = new ArrayList<Double>();

	public MlpModel() {
	}

	/**
	 * Builds the four-layer MLP (input, bias, hidden, output) described by
	 * {@code params}: registers the shared target-values and params
	 * containers, the hidden/output error calculators, the plain/sigmoid
	 * activation functions, creates the layers and neurons, and connects
	 * input/bias to hidden and bias/hidden to output.
	 * Must be called before {@link #runMlp()}.
	 *
	 * @param params network dimensions, learning parameters and epoch count
	 */
	@SuppressWarnings("unchecked")
	public void createMlpNet(NetworkParams params) {
		this.epochsCount = params.getEpochsCount();

		// Each neuron's IErrorCalculator must be connected to the
		// MlpNet -> TargetValuesContainer.

		// Target values container (shared by both error calculators).
		TargetValuesContainer targetsContainer =
			mlpFactory.createTargetValuesContainer();
		mlpNet.setTargetValuesContainer(targetsContainer);

		// Learning parameters container (shared by the activation functions).
		MlpParamsContainer paramsContainer =
			mlpFactory.createMlpParamsContainer();
		paramsContainer.setLearnRate(params.getLearnRate());
		paramsContainer.setLearnRateDecreaseCoef(params.getLearnRateDecrease());
		paramsContainer.setSigmoidStepCoef(params.getSigmoidStep());
		mlpNet.setParamsContainer(paramsContainer);

		// Error calculators, registered under the layer kind they serve.
		IErrorCalculator errCalculatorHidden =
			mlpFactory.createHiddenErrorCalculator();
		errCalculatorHidden.setCalculatorName("hidden");
		errCalculatorHidden.setTargetValuesContainer(targetsContainer);
		mlpNet.getErrorCalculators().put("hidden", errCalculatorHidden);

		IErrorCalculator errCalculatorOutput =
			mlpFactory.createOutputErrorCalculator();
		errCalculatorOutput.setCalculatorName("output");
		errCalculatorOutput.setTargetValuesContainer(targetsContainer);
		mlpNet.getErrorCalculators().put("output", errCalculatorOutput);

		// Activation functions: "plain" (pass-through) for input/bias
		// neurons, "sigmoid" for hidden/output neurons.
		IActFunction actFunctionPlain =
			mlpFactory.createPlainActFunction();
		actFunctionPlain.setNameOfFunction("plain");
		actFunctionPlain.setParamsContainer(paramsContainer);
		mlpNet.getActFunctions().put("plain", actFunctionPlain);

		IActFunction actFunctionSigmoid =
			mlpFactory.createSigmoidActFunction();
		actFunctionSigmoid.setNameOfFunction("sigmoid");
		actFunctionSigmoid.setParamsContainer(paramsContainer);
		mlpNet.getActFunctions().put("sigmoid", actFunctionSigmoid);

		// Create & register the layers.
		Layer inputLayer = netFactory.createLayer();
		inputLayer.setLayerName("input");
		Layer biasLayer = netFactory.createLayer();
		biasLayer.setLayerName("bias");
		Layer hiddenLayer = netFactory.createLayer();
		hiddenLayer.setLayerName("hidden");
		Layer outputLayer = netFactory.createLayer();
		outputLayer.setLayerName("output");

		mlpNet.getLayers().put("input", inputLayer);
		mlpNet.getLayers().put("bias", biasLayer);
		mlpNet.getLayers().put("hidden", hiddenLayer);
		mlpNet.getLayers().put("output", outputLayer);

		// Neurons: one input neuron per image pixel.
		mlpNet.addNeurons(
				params.getInputDimX() * params.getInputDimY(),
				"plain",
				"hidden",
				inputLayer);

		// Single bias neuron; its input is pinned to 1.0 by runMlp().
		mlpNet.addNeurons(
				1,
				"plain",
				"hidden",
				biasLayer);

		// NOTE(review): the original used hiddenDim - 1, presumably because
		// one slot is accounted for by the bias — confirm against MlpNet.
		mlpNet.addNeurons(
				params.getHiddenDim() - 1,
				"sigmoid",
				"hidden",
				hiddenLayer);

		mlpNet.addNeurons(
				params.getOutputDim(),
				"sigmoid",
				"output",
				outputLayer);

		// Feed-forward wiring; the bias layer feeds both trainable layers.
		mlpNet.connectLayers(inputLayer, hiddenLayer);
		mlpNet.connectLayers(biasLayer, hiddenLayer);
		mlpNet.connectLayers(biasLayer, outputLayer);
		mlpNet.connectLayers(hiddenLayer, outputLayer);

		// Network ready.
		this.isInitalized = true;
	}

	/**
	 * Trains the network for the configured number of epochs on the training
	 * images from {@link ImagesBean}. Each epoch presents every pattern once
	 * (forward pass, error propagation, weight update), accumulates the
	 * epoch error, streams it to {@link ErrorGraphView} in batches of
	 * {@link #ERROR_BUFFER_SIZE}, and decays the learning rate.
	 */
	@SuppressWarnings("unchecked")
	public void runMlp() {
		// BUGFIX: the old guard checked mlpFactory == null, which is never
		// true (field initializer). Running before createMlpNet() would NPE
		// on the missing target container, so guard on initialization.
		if (!this.isInitalized) {
			return;
		}

		ImagesBean bean = ImagesBean.getInstance();

		HashMap<Integer, HashMap> inputsForEpoch =
			this.setupMlpEpochInputs(bean.getFileNameTrainData());

		HashMap<Integer, HashMap> targetsForEpoch =
			this.setupMlpEpochTargets(bean.getFileNameTrainTargetData());

		// Bias input is always 1.0.
		HashMap bias = new HashMap();
		bias.put(0, 1.0);
		mlpNet.loadLayerInputs(
				(Layer) mlpNet.getLayers().get("bias"),
				bias);

		float[] errorsBuffer = new float[ERROR_BUFFER_SIZE];
		int bufferPosition = 0;

		for (int epoch = 1; epoch <= this.epochsCount; epoch++) {
			double epochError = 0.0; // primitive: avoids boxing on every pattern

			for (int keyInt = 0; keyInt < inputsForEpoch.size(); keyInt++) {
				// Present pattern keyInt and its expected target vector.
				mlpNet.loadLayerInputs(
						(Layer) mlpNet.getLayers().get("input"),
						(HashMap) inputsForEpoch.get(keyInt));

				TargetValuesContainer targetValuesContainer =
					mlpNet.getTargetValuesContainer();
				targetValuesContainer.setValues(
						(Map) targetsForEpoch.get(keyInt));

				// Forward pass, back-propagate errors, update weights.
				this.runMlpSignal(mlpNet);
				this.runMlpError(mlpNet);
				this.runMlpTeach(mlpNet);

				epochError += mlpNet.calculateNetworkError();
			}

			String epochErrorText = "epoch Nr."+epoch+"; error = "+epochError+";" + "\n";
			System.out.print(epochErrorText);
			epochsErrors.add(epochError);

			// BUGFIX: store first, then flush when full. The old code
			// flushed INSTEAD of storing once the buffer was full, silently
			// dropping every (ERROR_BUFFER_SIZE + 1)-th epoch's error.
			errorsBuffer[bufferPosition] = (float) epochError;
			bufferPosition++;
			if (bufferPosition == ERROR_BUFFER_SIZE) {
				ErrorGraphView.appendToDataSet(errorsBuffer);
				bufferPosition = 0;
				errorsBuffer = new float[ERROR_BUFFER_SIZE];
			}

			// At the end of each epoch - decrease learn rate.
			double newLearnRate =
				mlpNet.getParamsContainer().getLearnRate() *
				mlpNet.getParamsContainer().getLearnRateDecreaseCoef();
			mlpNet.getParamsContainer().setLearnRate(newLearnRate);
		}

		// Flush the partially-filled remainder of the buffer to the graph.
		// BUGFIX: the old code called appendToDataSet INSIDE the copy loop,
		// pushing a half-copied buffer once per element.
		if (bufferPosition > 0) {
			float[] flushBuffer = new float[bufferPosition];
			for (int i = 0; i < flushBuffer.length; i++) {
				flushBuffer[i] = errorsBuffer[i];
			}
			ErrorGraphView.appendToDataSet(flushBuffer);
		}
	}

	/**
	 * Runs the test data set through the trained network once — forward pass
	 * only, no weight updates — logging each output neuron's activation and
	 * the per-pattern network error, and recording the classification result
	 * (winner output neuron vs. expected target component).
	 */
	@SuppressWarnings("unchecked")
	public void runTesting() {
		ImagesBean bean = ImagesBean.getInstance();

		HashMap<Integer, HashMap> inputsForEpoch =
			this.setupMlpEpochInputs(bean.getFileNameTestData());

		HashMap<Integer, HashMap> targetsForEpoch =
			this.setupMlpEpochTargets(bean.getFileNameTestTargetData());

		System.out.println("Testing:");

		for (int keyInt = 0; keyInt < inputsForEpoch.size(); keyInt++) {
			mlpNet.loadLayerInputs(
					(Layer) mlpNet.getLayers().get("input"),
					(HashMap) inputsForEpoch.get(keyInt));

			TargetValuesContainer targetValuesContainer =
				mlpNet.getTargetValuesContainer();
			targetValuesContainer.setValues(
					(Map) targetsForEpoch.get(keyInt));

			// Forward pass only — testing must not modify weights.
			this.runMlpSignal(mlpNet);

			// Log every output neuron's activation for this pattern.
			Iterator iterNeurons =
				((Layer) mlpNet.getLayers().get("output"))
						.getNeurons().values().iterator();
			while (iterNeurons.hasNext()) {
				MlpNeuron neuron = (MlpNeuron) iterNeurons.next();
				String testResultForIteration =
					"neuron id="+neuron.getNeuronId()+"; out="+neuron.getOutput();
				System.out.println(testResultForIteration);
			}

			double netError = this.mlpNet.calculateNetworkError();
			System.out.println("NET ERROR = "+netError);
			double[] e = new double[] { netError };
			TestGraphView.appendToDataSet(e, keyInt);

			// Classification result: strongest output neuron vs. the index
			// of the largest target-vector component.
			int winnerNeuronId = this.getWinnerNeuronId();
			int resultTargetVectorId = this.getResultVectorId();
			lv.bond.science.nnstudio.ui.testresult.TestResult tResult =
				new lv.bond.science.nnstudio.ui.testresult.TestResult(
						winnerNeuronId,
						resultTargetVectorId);

			TestResultView.addTestResult(tResult);
		}
	}

	/** @return index of the largest component of the current target vector */
	private int getResultVectorId() {
		Map targetMap = this.mlpNet.getTargetValuesContainer().getValues();
		return getKeyOfMaximumValue(targetMap);
	}

	/**
	 * Returns the Integer key whose Double value is the largest, or -1 for
	 * an empty map.
	 * BUGFIX: the original seeded the scan with Double.MIN_VALUE, which is
	 * the smallest POSITIVE double — all-negative maps would return -1.
	 * NEGATIVE_INFINITY is the correct identity element for max.
	 *
	 * @param map expected to hold Integer keys and Double values
	 */
	private int getKeyOfMaximumValue(Map map) {
		int keyOfMaxValue = -1;
		double maxValue = Double.NEGATIVE_INFINITY;
		Iterator iter = map.keySet().iterator();
		while (iter.hasNext()) {
			Integer key = (Integer) iter.next();
			Double value = (Double) map.get(key);
			if (value > maxValue) {
				keyOfMaxValue = key;
				maxValue = value;
			}
		}
		return keyOfMaxValue;
	}

	/** @return key of the output-layer neuron with the strongest activation */
	private int getWinnerNeuronId() {
		// (Dead "new HashMap()" pre-assignment from the original removed.)
		HashMap outputMap = this.mlpNet.readLayerOutputs(
							(Layer) this.mlpNet.getLayers().get("output"));
		return getKeyOfMaximumValue(outputMap);
	}

	/** Unboxes a list of Double values into a primitive double[]. */
	private double[] listToArray(ArrayList list) {
		double[] result = new double[list.size()];
		int i = 0;
		for (Iterator iter = list.iterator(); iter.hasNext();) {
			result[i] = (Double) iter.next();
			i++;
		}
		return result;
	}

	/**
	 * Loads input patterns from a horizontal strip image. The strip holds
	 * square glyphs (height x height pixels) separated by a one-pixel
	 * column, so its width must be a multiple of (height + 1). Raw pixel
	 * values are mapped -1 -> 0.0 and 255 -> 1.0; any other raw value
	 * passes through unchanged.
	 *
	 * @param path image file to read
	 * @return map: image index -> (pixel index -> input value), or
	 *         {@code null} when the strip width is not a multiple of height+1
	 */
	@SuppressWarnings("unchecked")
	private HashMap<Integer, HashMap> setupMlpEpochInputs(String path) {
		HashMap<Integer, HashMap> inputsForEpoch =
			new HashMap<Integer, HashMap>();

		Image trainImage = new Image(null, path);
		try {
			ImageData trainImageData = trainImage.getImageData();

			// Height of one image is equal to its width (square glyphs).
			int height = trainImageData.height;
			int width = trainImageData.width;
			if (width % (height + 1) != 0) {
				System.out.println("Wrong format of train data!");
				return null;
			}

			int countOfImages = width / (height + 1);

			for (int imageNo = 0; imageNo < countOfImages; imageNo++) {
				HashMap inputValues = new HashMap();
				// For all lines of the square glyph...
				for (int lineNo = 0; lineNo < height; lineNo++) {
					ArrayList imageRow = new ArrayList();
					// ...and all columns (also height wide, plus separator).
					for (int columnNo = 0; columnNo < height; columnNo++) {
						int x = (imageNo * (height + 1)) + columnNo;
						int y = lineNo;
						// BUGFIX: reuse the cached ImageData; the original
						// re-extracted getImageData() for every single pixel.
						int in = trainImageData.getPixel(x, y);
						switch (in) {
							case  -1: in = 0; break;
							case 255: in = 1; break;
						}
						imageRow.add(Double.valueOf(in));
					}
					put(inputValues, listToArray(imageRow));
				}
				inputsForEpoch.put(imageNo, inputValues);
			}
		} finally {
			// BUGFIX: SWT Images hold native resources and must be disposed.
			trainImage.dispose();
		}

		return inputsForEpoch;
	}

	/**
	 * Hard-coded 9x7 distorted test glyphs (A, B, C). Currently unused —
	 * kept as a fixture for manual experiments without image files.
	 */
	@SuppressWarnings("unchecked")
	private HashMap<Integer, HashMap> setupMlpEpochInputsForTest() {
		HashMap<Integer, HashMap> inputsForEpoch =
			new HashMap<Integer, HashMap>();

		// Setup inputs
		HashMap inputValues = new HashMap();
		put( inputValues, getDouble("0,0,0,1,0,0,0") );
		put( inputValues, getDouble("0,0,0,1,0,0,0") );
		put( inputValues, getDouble("0,0,1,0,1,0,0") );
		put( inputValues, getDouble("0,1,0,0,0,1,0") );
		put( inputValues, getDouble("1,0,0,0,0,0,1") );
		put( inputValues, getDouble("1,0,0,0,0,0,1") );
		put( inputValues, getDouble("1,1,1,1,1,1,1") );
		put( inputValues, getDouble("1,0,0,0,0,0,1") );
		put( inputValues, getDouble("1,0,0,0,0,0,1") );
		inputsForEpoch.put(0, inputValues );

		inputValues = new HashMap();
		put( inputValues, getDouble("1,1,1,1,1,1,0") );
		put( inputValues, getDouble("0,1,0,0,0,0,1") );
		put( inputValues, getDouble("0,1,0,0,0,0,1") );
		put( inputValues, getDouble("0,1,0,0,0,0,1") );
		put( inputValues, getDouble("0,1,1,1,1,1,0") );
		put( inputValues, getDouble("0,1,0,0,0,0,1") );
		put( inputValues, getDouble("0,1,0,0,0,0,1") );
		put( inputValues, getDouble("0,1,0,0,0,0,1") );
		put( inputValues, getDouble("1,1,1,1,1,1,0") );
		inputsForEpoch.put(1, inputValues );

		inputValues = new HashMap();
		put( inputValues, getDouble("0,1,1,1,1,0,0") );
		put( inputValues, getDouble("1,0,0,0,0,1,0") );
		put( inputValues, getDouble("1,0,0,0,0,0,0") );
		put( inputValues, getDouble("1,0,0,0,0,0,0") );
		put( inputValues, getDouble("1,0,0,0,0,0,0") );
		put( inputValues, getDouble("1,0,0,0,0,0,0") );
		put( inputValues, getDouble("1,0,0,0,0,0,0") );
		put( inputValues, getDouble("1,0,0,0,0,1,0") );
		put( inputValues, getDouble("0,1,1,1,1,0,0") );
		inputsForEpoch.put(2, inputValues );

		return inputsForEpoch;
	}

	/**
	 * Loads target vectors from an image: each pixel ROW is one target
	 * vector, one column per output neuron. Raw pixel values are mapped
	 * -1 -> 0.0 and 255 -> 1.0; any other raw value passes through.
	 *
	 * @param pathToTargetsImage image file to read
	 * @return map: pattern index -> (output index -> target value)
	 */
	@SuppressWarnings("unchecked")
	private HashMap<Integer, HashMap> setupMlpEpochTargets(String pathToTargetsImage) {
		HashMap<Integer, HashMap> targetsForEpoch =
			new HashMap<Integer, HashMap>();

		Image trainImage = new Image(null, pathToTargetsImage);
		try {
			ImageData trainImageData = trainImage.getImageData();

			int height = trainImageData.height;
			int width = trainImageData.width;

			// Each row is a target vector.
			for (int rowNo = 0; rowNo < height; rowNo++) {
				HashMap targetValues = new HashMap();
				for (int columnNo = 0; columnNo < width; columnNo++) {
					// BUGFIX: reuse the cached ImageData instead of
					// re-extracting it for every pixel.
					int in = trainImageData.getPixel(columnNo, rowNo);
					switch (in) {
						case  -1: in = 0; break;
						case 255: in = 1; break;
					}
					targetValues.put(columnNo, Double.valueOf(in));
				}
				targetsForEpoch.put(rowNo, targetValues);
			}
		} finally {
			// BUGFIX: dispose the SWT image's native resources.
			trainImage.dispose();
		}

		return targetsForEpoch;
	}

	/** Back-propagates errors through the output layer, then the hidden layer. */
	@SuppressWarnings("unchecked")
	private void runMlpError( MlpNet mlpNet ) {
		HashMap layersForErrorAndTeaching = new HashMap();
		layersForErrorAndTeaching.put(Integer.valueOf(0), "output");
		layersForErrorAndTeaching.put(Integer.valueOf(1), "hidden");
		mlpNet.propagateError( layersForErrorAndTeaching );
	}

	/** Applies weight updates to the output layer, then the hidden layer. */
	@SuppressWarnings("unchecked")
	private void runMlpTeach( MlpNet mlpNet ) {
		HashMap layersForErrorAndTeaching = new HashMap();
		layersForErrorAndTeaching.put(Integer.valueOf(0), "output");
		layersForErrorAndTeaching.put(Integer.valueOf(1), "hidden");
		mlpNet.teachLayers( layersForErrorAndTeaching );
	}

	/** Forward pass: propagates the loaded inputs through hidden, then output. */
	@SuppressWarnings("unchecked")
	private void runMlpSignal( MlpNet mlpNet ) {
		HashMap signalRun = new HashMap();
		signalRun.put(0, "hidden");
		signalRun.put(1, "output");
		mlpNet.runSignal( signalRun );
	}

	/** Parses a comma-separated list of numbers, e.g. "0,1,0", into a double[]. */
	private double[] getDouble(String str) {
		String[] strs = str.split(",");
		double[] result = new double[strs.length];
		for (int i = 0; i < strs.length; i++) {
			result[i] = Double.valueOf(strs[i]);
		}
		return result;
	}

	/** Appends the values of {@code d} to {@code map}, keyed by running index. */
	@SuppressWarnings("unchecked")
	private void put(Map map, double[] d) {
		for (int count = 0; count < d.length; count++ ) {
			map.put( map.size(), d[count] );
		}
	}

	/** @return true once {@link #createMlpNet(NetworkParams)} has completed */
	public boolean isInitalized() {
		return isInitalized;
	}

	public MlpNet getMlpNet() {
		return mlpNet;
	}

	/** @return total network error of each completed epoch, in epoch order */
	public ArrayList getEpochsErrors() {
		return epochsErrors;
	}

}
