
import java.util.HashMap;
import java.util.Set;
import net.sf.javaml.classification.AbstractClassifier;
import net.sf.javaml.core.Dataset;
import net.sf.javaml.core.Instance;
import net.sf.javaml.distance.EuclideanDistance;

/**
 * k-Nearest Neighbor classifier.
 *
 * <p>Stores the training set verbatim in {@link #buildClassifier} (lazy
 * learner) and classifies by majority vote among the {@code k} training
 * instances closest in Euclidean distance.
 *
 * @author khenke
 */
public class MyKNN extends AbstractClassifier{

    /** Number of neighbors consulted in the majority vote; never reassigned. */
    private final int k;
    /** Training data; null until {@link #buildClassifier} is called. */
    private Dataset training;

    /** Creates a 1-nearest-neighbor classifier. */
    public MyKNN(){
       this(1);
    }

    /**
     * Creates a k-nearest-neighbor classifier.
     *
     * @param k the number of neighbors to consult; must be at least 1
     * @throws IllegalArgumentException if {@code k} is less than 1
     */
    public MyKNN(int k){
        if(k < 1)
            throw new IllegalArgumentException("k must be at least 1, got " + k);
        this.k = k;
    }

    /**
     * Trains the classifier by retaining a reference to the data set;
     * no model is built up front (k-NN is a lazy learner).
     *
     * @param data the training data set
     */
    @Override
    public void buildClassifier(Dataset data){
        this.training = data;
    }

    /**
     * Classifies an instance based on the training data. The k nearest
     * neighbors are determined by their Euclidean distance from the given
     * instance; the prediction is the class holding the majority among them.
     * Ties are broken arbitrarily (map iteration order).
     *
     * @param instance the instance subject to classification
     * @return the predicted class value, or {@code null} if no neighbor
     *         was returned by the training set
     * @throws IllegalStateException if {@link #buildClassifier} has not
     *         been called yet
     */
    @Override
    public Object classify(Instance instance){
        if(training == null)
            throw new IllegalStateException("classifier not trained; call buildClassifier first");

        Set<Instance> neighbors = training.kNearest(k, instance, new EuclideanDistance());

        /* Count the votes per class among the k nearest neighbors.
         * Null-safe lookup avoids the NPE the old code risked when a
         * neighbor's class was missing from the pre-initialized map. */
        HashMap<Object, Integer> votes = new HashMap<Object, Integer>();
        for(Instance i : neighbors){
            Integer count = votes.get(i.classValue());
            votes.put(i.classValue(), count == null ? 1 : count + 1);
        }

        /* Majority voting: pick the class with the strictly highest count.
         * Classes with zero votes can never win, so only voted classes
         * need to be considered. */
        int max = 0;
        Object prediction = null;
        for(Object o : votes.keySet()){
            if(votes.get(o) > max){
                max = votes.get(o);
                prediction = o;
            }
        }
        return prediction;
    }
}
