package de.fzi.kasma.learner.genetic;

import de.fzi.kasma.learner.data.Dataset;
import de.fzi.kasma.learner.function.kernel.HypothesisKernel;
import de.fzi.kasma.learner.function.prediction.HypothesisPredictionFunction;
import de.fzi.kasma.learner.function.scoring.LossScoringFunction;
import ec.EvolutionState;
import ec.Individual;
import ec.Problem;
import ec.simple.SimpleFitness;
import ec.simple.SimpleProblemForm;

/**
 * ECJ problem definition that scores an {@link RLIndividual} (a set of clauses)
 * by building a convolution kernel from the clauses, training a prediction
 * function on the shared dataset, and using the resulting loss as fitness.
 */
public class RLProblem extends Problem implements SimpleProblemForm {

	private static final long serialVersionUID = 3400935019153652744L;

	/**
	 * Human-readable description hook required by {@link SimpleProblemForm}.
	 * Intentionally a no-op: this problem has nothing extra to report.
	 */
	@Override
	public void describe(Individual ind, EvolutionState state,
			int subpopulation, int threadnum, int log, int verbosity) {
		// Nothing to describe for this problem.
	}

	/**
	 * Evaluates one individual: constructs a hypothesis kernel from its
	 * clauses, trains an SVM-backed prediction function against the dataset
	 * held by the {@link RLInitializer}, and records the loss score as the
	 * individual's fitness.
	 *
	 * <p>If scoring throws, the failure is reported via ECJ's output and the
	 * individual receives fitness 0 (the worst value), so broken hypotheses
	 * are selected against rather than silently treated as valid.
	 *
	 * @param ind expected to be an {@link RLIndividual}; its fitness slot is
	 *            expected to be a {@link SimpleFitness}
	 */
	@Override
	public void evaluate(EvolutionState state, Individual ind,
			int subpopulation, int threadnum) {
		// Skip individuals already scored in a previous generation.
		if (ind.evaluated) return;

		RLIndividual ind2 = (RLIndividual) ind;

		// Dataset is shared across all evaluations via the initializer.
		Dataset dataset = ((RLInitializer) state.initializer).getDataset();

		// Convolution kernel constructed from the individual's clauses.
		// TODO: optimize the kernel calculation using graph index
		HypothesisKernel hk = new HypothesisKernel(ind2.clauses);
		// Calls the SVM and learns the prediction coefficients.
		HypothesisPredictionFunction hf = new HypothesisPredictionFunction(dataset, hk);
		// Loss function derives the fitness from the prediction function and dataset.
		LossScoringFunction lsf = new LossScoringFunction(hf, dataset);

		// Keep the score's fractional precision: SimpleFitness stores a float,
		// so truncating to long here would collapse distinct fitness values.
		double fit = 0.0;
		try {
			fit = lsf.getScore();
		} catch (Exception e) {
			// Report through ECJ's logging rather than stderr; the individual
			// keeps the worst fitness (0) so it is selected against.
			state.output.warning("Fitness evaluation failed for individual: " + e);
		}

		((SimpleFitness) ind2.fitness).setFitness(state, (float) fit, false);
		ind2.evaluated = true;
	}

}
