package algorithm.classification.binary;

import java.util.*;

import algorithm.Classifier;
import algorithm.LearningClassifier;
import algorithm.ScoringMachine;
import algorithm.classification.binary.parameters.PerceptronParameters;
import tools.MathematicTool;
import tools.Pair;
import data.set.OfflineSet;
import data.set.OfflineSetIterator;
import data.set.implementation.OfflineSet_Memory;
//import data.set.adaptator.SupervisedOfflineSet_To_PairOfflineSetUsingClassifier;
import data.vectors.DoubleVector;
import data.vectors.DoubleVectorIterator;
import data.vectors.DoubleVectorWritable;
import data.vectors.implementation.DoubleVector_Sparse_Memory;

/**
 * Binary linear classifier trained by stochastic gradient descent on a
 * perceptron-style margin loss with L2 regularization (lambda * ||w||^2).
 *
 * <p>The regularization shrink is applied lazily: instead of multiplying the
 * whole (possibly dense) weight vector after every update, the multiplicative
 * decay is accumulated over {@code params.getNumUpdate()} updates and applied
 * in one {@code Math.pow} step — the standard sparse-SGD trick (cf. Pegasos).
 *
 * <p>NOTE(review): not thread-safe; {@code learn} mutates {@code w} and
 * {@code params} in place.
 */
public class PerceptronLoss_L2Regularized implements 
		LearningClassifier<DoubleVector,Boolean>
{	
	/** Learning hyper-parameters: loss, step size, lambda, iteration budget. */
	protected PerceptronParameters params;
	/** Weight vector; lazily allocated on the first call to {@link #learn}. */
	protected DoubleVectorWritable w;
	
	public PerceptronLoss_L2Regularized(PerceptronParameters params) {
		this.params = params;
	}
	
	/////////	Interface implementation
	
	/**
	 * Raw linear score {@code w . x}.
	 * NOTE(review): throws NullPointerException if called before
	 * {@link #learn} or {@link #setWeight} initialized the weights.
	 */
	public double getScore(DoubleVector x) {
		return w.computeDOTProduct(x);
	}
	
	/** Raw linear score of the example's input vector; the label is ignored. */
	public double getScore(Pair<DoubleVector, Boolean> x) 
	{
		return w.computeDOTProduct(x.getX());
	}
	
	/** Predicted label: {@code true} iff the linear score is strictly positive. */
	public Boolean map(DoubleVector x) {
		// Calls the DoubleVector overload directly instead of wrapping x in a
		// throw-away Pair whose label was discarded anyway.
		return getScore(x) > 0;
	}
	
	////////
	
	/**
	 * Trains the weight vector by SGD over the supervised set.
	 *
	 * <p>Runs up to {@code params.nbiterations} epochs. For each example with
	 * non-zero loss it takes a gradient step {@code w -= step * y * L'(y * w.x) * x}
	 * (labels mapped to y in {-1, +1}), applying the accumulated L2 shrink every
	 * {@code params.getNumUpdate()} updates. Stops early once an entire epoch
	 * incurs zero loss. Optionally decays the step size geometrically per epoch.
	 *
	 * @param set supervised examples (input vector, boolean label)
	 */
	public void learn(OfflineSet<Pair<DoubleVector,Boolean>> set) {
		
		// Lazily size the weight vector from the first example's dimension.
		if (w == null) {
			w = new DoubleVector_Sparse_Memory(set.iterator().next().getX().size());
		}
		
		int nbu = 0; // updates accumulated since the last regularization shrink
		System.out.print("iteration : ");
		for (int iteration = 0; iteration < params.nbiterations; iteration++)
		{
			if (iteration % 50 == 0)
				System.out.print(iteration+" ");
			
			double totalLoss = 0.0;
			OfflineSetIterator<Pair<DoubleVector,Boolean>> iterator = set.inputsetiterator();
			while (iterator.hasNext())
			{
				iterator.next();
				Pair<DoubleVector,Boolean> p = iterator.getCurrentObject();
				// Map the boolean label to the signed target y in {-1, +1}.
				double y = p.getY() ? 1.0 : -1.0;
				double dot = w.computeDOTProduct(p.getX());
				double l = params.getLoss().getValue(dot * y);
				if (l != 0)
				{
					nbu++;
					totalLoss += l;
					// Gradient step on the margin loss: w -= step * y * L'(y * w.x) * x
					w.add(p.getX(),
							-params.getStep() * y * params.getLoss().getDerivatedValue(y * dot));
					
					// Lazy L2 shrink: apply the decay accumulated over nbu updates
					// in a single multiplicative pass over w.
					if (nbu == params.getNumUpdate()) 
					{
						double coef = Math.pow(1 - params.getLambda() * params.getStep(), nbu);
						w.product(coef);
						nbu = 0;
					}
				}
			}
			
			// Geometric step-size decay per epoch, when enabled.
			if (params.isWeightDecay())
				params.setStep(params.getStep() * 0.99);
			
			params.setLossIter(iteration, totalLoss);
			
			// Early stop: every margin was satisfied during this epoch.
			// NOTE(review): any updates still pending in nbu are never
			// regularized on exit — confirm this is intentional.
			if (totalLoss == 0) {
				System.out.println("terminate at "+iteration+"th iteration");
				break;
			}
		}
		System.out.println();
	}
	
	/** Hyper-parameter object this learner was constructed with (live reference). */
	public PerceptronParameters getLearningParameters(){
		return params;
	}

	/** Current weight vector ({@code null} until trained or explicitly set). */
	public DoubleVector getWeight() {
		return w;
	}

	/** Replaces the weight vector. Shared by reference, not defensively copied. */
	public void setWeight(DoubleVectorWritable w) {
		this.w = w;
	}

}