#include "kNNClassifier.h"

#include <utility>
#include <iostream>
#include <algorithm>
#include <assert.h>

#include "DataSet.h"
#include "Sample.h"

/// Construct a classifier that consults the k nearest neighbours per query.
/// @param k neighbourhood size used by test()
kNNClassifier::kNNClassifier(const int& k)
  : k_(k)
{}

/// Destructor — nothing to release; members clean up via RAII.
kNNClassifier::~kNNClassifier()
{}

/// @return the configured neighbourhood size k.
int kNNClassifier::getK() const
{
  return k_; // number of neighbours consulted per query
}

/// Set the neighbourhood size k used by subsequent calls to test().
/// @param k new neighbourhood size
void kNNClassifier::setK(const int& k)
{
  k_ = k; // takes effect on the next query
}

/// Classify sample s by majority vote among its k nearest trained samples
/// (squared Euclidean distance). Ties are broken by shrinking the
/// neighbourhood until the vote is unambiguous.
/// @param s sample to classify
/// @return predicted class label in [0, nrClasses_)
int kNNClassifier::test(const Sample& s) const
{
  const int nSamples = static_cast<int>(in_.size());
  assert(nSamples > 0); // classifying without training data is a caller bug

  // Clamp k to the amount of training data available.
  int k = std::min(k_, nSamples);

  // Squared Euclidean distance from s to every trained sample.
  // (Squaring is monotone, so skipping sqrt does not change the ranking.)
  std::vector<std::pair<float, int> > lut; // (distance, label)
  lut.reserve(nSamples);
  for (unsigned int all = 0; all < in_.size(); ++all) {
    float distance = 0.0f;
    // BUGFIX: was in_[1].input().size() — hard-coded sample index 1 gave the
    // wrong dimensionality and broke with fewer than two training samples.
    const unsigned int dims = static_cast<unsigned int>(in_[all].input().size());
    for (unsigned int dimension = 0; dimension < dims; ++dimension) {
      const float diff = s.input(dimension) - in_[all].input(dimension);
      distance += diff * diff;
    }
    lut.push_back(std::make_pair(distance, in_[all].label()));
  }

  // Sort once by ascending distance; replaces the original repeated
  // nth_element calls (O(k*n)) with a single O(n log n) pass.
  std::sort(lut.begin(), lut.end());

  std::vector<int> votes(nrClasses_, 0);
  int pos = -1; // winning class

  while (k > 0) {
    std::fill(votes.begin(), votes.end(), 0);
    for (int i = 0; i < k; ++i)
      ++votes[lut[i].second]; // vote of the i-th nearest neighbour

    // Find the class with the most votes; count how many classes tie it.
    int max = 0, ties = 0;
    pos = -1;
    for (unsigned int f = 0; f < votes.size(); ++f) {
      if (votes[f] > max) {
        max = votes[f];
        ties = 0; // a strictly larger max invalidates earlier ties
        pos = static_cast<int>(f);
      } else if (votes[f] == max && max > 0) {
        // BUGFIX: require max > 0 — the original also counted zero-vote
        // classes as ties, inflating the shrink step.
        ++ties;
      }
    }

    if (ties == 0)
      break;      // unambiguous winner
    k -= ties;    // shrink the neighbourhood and re-vote
  }

  // If ties persisted all the way down, fall back to the single nearest
  // neighbour's label (guaranteed to exist since nSamples > 0).
  if (pos < 0)
    pos = lut.front().second;

  assert(pos >= 0);
  return pos;
}



/// Store every sample of ds for later nearest-neighbour queries and derive
/// the class count from the largest label seen.
/// @param ds training data; labels are assumed to be the natural numbers
///           0..max (every value in that range is treated as a class)
/// NOTE(review): repeated calls append to in_ but recompute nrClasses_ from
/// this DataSet only — confirm whether incremental training is intended.
void kNNClassifier::train(const DataSet& ds)
{
  int max = -1; // largest label encountered

  in_.reserve(in_.size() + ds.samples().size()); // avoid repeated reallocation
  for (unsigned int s = 0; s < ds.samples().size(); ++s) {
    in_.push_back(ds.sample(s));
    // BUGFIX: consistently read the label from the stored sample — the
    // original compared ds.label(s) but assigned ds.sample(s).label(),
    // which silently miscounts classes if the two ever disagree.
    max = std::max(max, in_.back().label());
  }

  nrClasses_ = max + 1; // assumes labels are contiguous from 0
}