package rs.fon.neuroph.classification;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;

import org.neuroph.core.Connection;
import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.Neuron;
import org.neuroph.core.learning.IterativeLearning;
import org.neuroph.core.learning.SupervisedTrainingElement;
import org.neuroph.core.learning.TrainingElement;
import org.neuroph.core.learning.TrainingSet;
import org.neuroph.nnet.comp.InputNeuron;

import rs.fon.neuroph.classification.NeurophClassificationModel;

import com.rapidminer.example.Attribute;
import com.rapidminer.example.Example;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.operator.Model;
import com.rapidminer.operator.OperatorException;

/**
 * Adapter between RapidMiner data structures and Neuroph neural networks.
 * Converts an {@link ExampleSet} into a Neuroph {@link TrainingSet} (mapping
 * each nominal label index to a one-hot output vector), trains a network
 * loaded from file, and translates network outputs back to label indices.
 */
public class NeurophClassificationAdapter {

	// Maps each label index (as returned by Example.getLabel()) to its
	// one-hot output vector. Rebuilt on every call to
	// convertExampleSetToTrainingSet.
	private HashMap<Double, ArrayList<Double>> labelValueMap;
	// Maximum number of learning iterations forced onto the network's rule.
	private int maxIterations;

	public NeurophClassificationAdapter(int maxIterations) {
		this.labelValueMap = new HashMap<Double, ArrayList<Double>>();
		this.maxIterations = maxIterations;
	}

	/**
	 * Loads a Neuroph network from {@code filePath}, adapts it to the example
	 * set, trains it and wraps the result in a RapidMiner {@link Model}.
	 *
	 * @param filePath   path of a serialized Neuroph network
	 * @param exampleSet labeled training data
	 * @return a trained {@link NeurophClassificationModel}
	 * @throws OperatorException if the example set has no label or the network
	 *                           cannot be matched to the attribute count
	 */
	public Model trainNNModel(String filePath, ExampleSet exampleSet) throws OperatorException {
		// prepare
		TrainingSet<TrainingElement> ts = convertExampleSetToTrainingSet(exampleSet);
		NeuralNetwork nnet = NeuralNetwork.load(filePath);

		// Check (and, if needed, adapt) the NN to the given dataset.
		checkNNCompatibility(exampleSet, nnet);

		// learn
		IterativeLearning learning = (IterativeLearning) nnet.getLearningRule();
		learning.setMaxIterations(maxIterations); // TODO: hack! should be done in Neuroph
		nnet.learn(ts);

		// return model
		return new NeurophClassificationModel(exampleSet, nnet, this);
	}

	/**
	 * Converts a RapidMiner example set into a supervised Neuroph training set.
	 * Regular attributes become the input vector; the label is expanded into a
	 * one-hot output vector via the label-value map built from this set.
	 *
	 * @param es labeled example set
	 * @return training set with one supervised element per example
	 * @throws OperatorException if the example set has no label attribute
	 */
	public TrainingSet<TrainingElement> convertExampleSetToTrainingSet(ExampleSet es) throws OperatorException {

		if (es.getAttributes().getLabel() == null)
			throw new OperatorException("No label defined in the training set");

		TrainingSet<TrainingElement> ts = new TrainingSet<TrainingElement>();

		// Build the label -> one-hot-vector map for this example set.
		createOutputVectorMapFromLabelValues(es);

		for (Example e : es) {
			// Collect the input values of the example (regular attributes only).
			ArrayList<Double> values = new ArrayList<Double>();
			for (Iterator<Attribute> i = e.getAttributes().iterator(); i.hasNext();)
				values.add(e.getNumericalValue(i.next()));

			// Label index of this example.
			Double labelIndex = e.getLabel();

			// One-hot output vector corresponding to the label.
			ArrayList<Double> outputVector = getOutputVectorFromLabelIndex(labelIndex);

			// Create a supervised training element from input and output vector.
			ts.addElement(new SupervisedTrainingElement(values, outputVector));
		}

		return ts;
	}

	/**
	 * Scans the example set for distinct label indices and assigns each one a
	 * one-hot output vector of length equal to the number of distinct labels.
	 * The vectors are stored in {@link #labelValueMap}.
	 */
	private void createOutputVectorMapFromLabelValues(ExampleSet es) {
		// Reset any mapping from a previous example set; otherwise stale
		// vectors (possibly of a different length) would survive between runs.
		labelValueMap.clear();

		// Collect the distinct label indices in order of first appearance.
		ArrayList<Double> labelValues = new ArrayList<Double>();
		for (Example e : es) {
			double labelValue = e.getLabel();
			if (!labelValues.contains(labelValue)) {
				labelValues.add(labelValue);
			}
		}

		// Assign the i-th distinct label a vector with a 1 at position i.
		for (int i = 0; i < labelValues.size(); i++) {
			ArrayList<Double> outputVector = new ArrayList<Double>();
			for (int j = 0; j < labelValues.size(); j++) {
				outputVector.add((double) 0);
			}
			outputVector.set(i, (double) 1);

			labelValueMap.put(labelValues.get(i), outputVector);
		}
	}

	/**
	 * Returns the one-hot output vector for the given label index, or
	 * {@code null} if the label was not present in the converted example set.
	 */
	public ArrayList<Double> getOutputVectorFromLabelIndex(double labelIndex) {
		return labelValueMap.get(labelIndex);
	}

	/**
	 * Reverse lookup: finds the label index whose one-hot vector equals the
	 * given output vector.
	 *
	 * @param classArray network output vector to look up
	 * @return the matching label index, or -1.0 if no mapping matches
	 */
	public double getLabelIndexFromOutputVector(ArrayList<Double> classArray) {
		// Iterate entries directly instead of keySet()+get() double lookup.
		for (HashMap.Entry<Double, ArrayList<Double>> entry : labelValueMap.entrySet()) {
			if (entry.getValue().equals(classArray))
				return entry.getKey();
		}
		return -1.0;
	}

	/**
	 * Ensures the network's input layer matches the number of regular
	 * attributes, adding or removing input neurons as necessary.
	 *
	 * NOTE(review): despite the name, this method MUTATES the network. Also,
	 * when removing surplus neurons they are removed only from the network's
	 * input-neuron list, not from the parent layer — verify against the
	 * Neuroph API whether the layer must be updated too.
	 *
	 * @throws OperatorException if the counts still mismatch after adaptation
	 */
	private void checkNNCompatibility(ExampleSet exampleSet, NeuralNetwork nnet) throws OperatorException {

		// Count the regular (input) attributes of the example set.
		int numberOfInputAttributes = 0;
		for (Iterator<Attribute> i = exampleSet.getAttributes().iterator(); i.hasNext();) {
			i.next();
			numberOfInputAttributes++;
		}

		int inputNeuronsMissing = numberOfInputAttributes - nnet.getInputNeurons().size();
		if (inputNeuronsMissing > 0) {
			// Clone the wiring of an existing input neuron for each new one.
			// Assumes the network has at least one input neuron to copy from.
			Neuron sampleNeuron = nnet.getInputNeurons().get(0);
			for (int i = 0; i < inputNeuronsMissing; i++) {
				InputNeuron n = new InputNeuron();
				for (Connection c : sampleNeuron.getOutConnections()) {
					c.getToNeuron().addInputConnection(n);
				}
				sampleNeuron.getParentLayer().addNeuron(n);
				nnet.getInputNeurons().add(n);
			}
		}

		if (inputNeuronsMissing < 0)
			for (int i = 0; i < -inputNeuronsMissing; i++)
				nnet.getInputNeurons().remove(0);

		if (numberOfInputAttributes != nnet.getInputNeurons().size())
			throw new OperatorException("Number of input neurons does not match the number of attributes");
	}
}
