/*
Copyleft 2006 by T.S.Yo
*/

package exp.regression;
import exp.util.*;
import ec.app.regression.*;
import ec.*;
import ec.util.*;
import ec.coevolve.*;
import ec.vector.*;
import ec.simple.*;
import ec.gp.*;
import ec.gp.koza.*;
//import java.io.*;

/* 
 * Regression2.java
 * 
 * Created: 2006.11.03
 * By: T.S.Yo
 */

/**
 * Regression2 extends Regression for the Koza (quartic) Symbolic Regression problem,
 * sampling a configurable number of test points over a configurable interval.
 *
 * @author T.S.Yo
 * @version 1.0 
 */
 
public class Regression2 extends Regression implements SimpleProblemForm
    {
    public static final String P_DATA = "data";
    public static final String P_SIZE = "size";
    public static final String MAX_TP = "max-test-point";
    public static final String MIN_TP = "min-test-point";
    public static final String NUM_TP = "number-of-test-points";
    public static final String MIN_ER = "zero-error";

    /** Upper bound of the sampling interval (parameter "max-test-point", default 1). */
    public float maxTP;
    /** Lower bound of the sampling interval (parameter "min-test-point", default -1). */
    public float minTP;
    /** Read from parameter "zero-error" but currently unused by evaluate(). */
    public float minER;
    /** Number of test points to sample (parameter "number-of-test-points", default 100). */
    public int numTP;

    // NOTE(review): currentValue, trainingSetSize, inputs, outputs and input all
    // shadow same-named public fields in ec.app.regression.Regression.  Terminal
    // nodes that reach the problem through the Regression compile-time type will
    // read the superclass copies, not these — confirm the function set used with
    // this class reads currentValue via Regression2.
    public double currentValue;
    public int trainingSetSize;

    // Read-only during evaluation-time, so they are shared by the shallow
    // copy that super.clone() makes — no deep clone needed.
    public double inputs[];
    public double outputs[];

    // Per-thread scratch datum; this one must be deep-cloned (see clone()).
    public RegressionData input;

    /**
     * Target function: the Koza quartic polynomial x^4 + x^3 + x^2 + x.
     *
     * @param x sample point
     * @return the target value at x
     */
    public double func(double x)
        { return x*x*x*x + x*x*x + x*x + x; }

    /**
     * Clones the problem for per-thread use.  inputs/outputs are read-only and
     * currentValue is transitory, so the shallow copy suffices for them; only
     * the RegressionData scratch object is deep-cloned.
     */
    public Object clone()
        {
        // BUG FIX: cast to Regression2, not Regression.  This class shadows the
        // superclass "input" field, so the old (Regression) cast deep-cloned the
        // hidden superclass copy while evaluate() kept reading the shared
        // Regression2 copy — clones were not actually independent.
        Regression2 myobj = (Regression2) (super.clone());

        myobj.input = (RegressionData)(input.clone());
        return myobj;
        }

    /**
     * Reads the test-point parameters, precomputes the training set over
     * [minTP, maxTP], and instantiates the per-thread RegressionData.
     *
     * @param state the evolution state (parameter database, output)
     * @param base  parameter base for this problem
     */
    public void setup(final EvolutionState state,
                      final Parameter base)
        {
        // very important, remember this
        super.setup(state,base);
        //----------------------------------------------------------------------
        // Parameters for regression Problem
        maxTP = state.parameters.getFloat(base.push(MAX_TP),null,1);
        minTP = state.parameters.getFloat(base.push(MIN_TP),null,-1);
        minER = state.parameters.getFloat(base.push(MIN_ER),null,-1);
        numTP = state.parameters.getInt(base.push(NUM_TP),null,100);
        if (numTP<1) state.output.fatal("Test points size must be an integer greater than 0");
        if (numTP!=trainingSetSize)
            {
            state.output.message("Replace the problem size with the number of test-points");
            trainingSetSize = numTP;
            }

        // Compute our inputs so they can be copied with protoClone later
        inputs = new double[trainingSetSize];
        outputs = new double[trainingSetSize];

        // Evenly space numTP points over [minTP, maxTP].  BUG FIX: guard the
        // numTP==1 case — the old code divided by zero, and minTP + 0*Infinity
        // produced a NaN test point.
        double inc = (numTP > 1) ? (maxTP - minTP) / ((double)(numTP-1)) : 0.0;
        for(int x=0;x<trainingSetSize;x++)
            {
            inputs[x] = minTP + ((double)x) * inc;
            outputs[x] = func(inputs[x]);
            }

        // set up our input -- don't want to use the default base, it's unsafe
        input = (RegressionData) state.parameters.getInstanceForParameterEq(
            base.push(P_DATA), null, RegressionData.class);
        input.setup(state,base.push(P_DATA));
        }

    /**
     * Evaluates a GP individual over the precomputed training set and assigns
     * a SimpleFitness of minus the summed absolute error (SimpleFitness
     * maximizes, so lower error means higher fitness).
     *
     * @param state     the evolution state
     * @param ind       the individual (must be a GPIndividual with a SimpleFitness)
     * @param threadnum the evaluating thread number
     */
    public void evaluate(final EvolutionState state,
                         final Individual ind,
                         final int threadnum)
        {
        if (!ind.evaluated)  // don't bother reevaluating
            {
            // Loop-invariant thresholds, hoisted out of the sample loop.
            final double HIT_LEVEL = 0.01;
            final double PROBABLY_ZERO = 1.11E-15;
            final double BIG_NUMBER = 1.0e15;  // the same as lilgp uses

            int hits = 0;          // count of near-exact samples (currently informational only)
            double sum = 0.0;      // total absolute error over the training set
            for (int y=0;y<trainingSetSize;y++)
                {
                currentValue = inputs[y];
                ((GPIndividual)ind).trees[0].child.eval(
                    state,threadnum,input,stack,((GPIndividual)ind),this);

                double result = Math.abs(outputs[y] - input.x);

                // cos(infinity), sin(infinity) etc. are NaN, and NaN compares
                // false with everything — so this is worded as NOT(result <
                // BIG_NUMBER) to clamp NaN as well as huge values.
                if (! (result < BIG_NUMBER))
                    result = BIG_NUMBER;
                else if (result < PROBABLY_ZERO)
                    // Absorb tiny float noise from equivalent but
                    // differently-ordered expressions, e.g.
                    // x*(x*x*x + x*x) vs. x*x*x*x + x*x*x.
                    result = 0.0;

                if (result <= HIT_LEVEL) hits++;

                sum += result;
                }
            // Negate the total error: SimpleFitness treats larger as fitter.
            // (The KozaFitness variant that also recorded hits is retired.)
            ((SimpleFitness)(ind.fitness)).setFitness( state, (float)(-1.0*sum), false );
            ind.evaluated = true;
            }
        }
    }
    
  
