//## begin module%3B8825AC005C.cm preserve=no
//	  %X% %Q% %Z% %W%
//## end module%3B8825AC005C.cm

//## begin module%3B8825AC005C.cp preserve=no
//	+----------------------------------------------+
//	| SVMBR - A program for training SVMs          |
//	+----------------------------------------------+
//	| * Creator, modeling, coding:                 |
//	|    Marcelo Barros de Almeida                 |
//	|    barros@smar.com.br                        |
//	|    http://litc.cpdee.ufmg.br/~barros/        |
//	| * Coding, improvements, bug fixes:           |
//	|    Bernardo Penna                            |
//	|    bprc@brfree.com.br                        |
//	+----------------------------------------------+
//
//	 Copyright(c) 2002 by Marcelo Barros de Almeida
//	                            All rights reserved
//
//	-[CVS]------------------------------------------
//	$Author$
//	$Date$
//	$Source$
//	$Name$
//	$Revision$
//	------------------------------------------------
//## end module%3B8825AC005C.cp

//## Module: svm%3B8825AC005C; Package body
//## Subsystem: <Top Level>
//## Source file: D:\textotese\svmbr\src\svm.cpp

//## begin module%3B8825AC005C.additionalIncludes preserve=no
//## end module%3B8825AC005C.additionalIncludes

//## begin module%3B8825AC005C.includes preserve=yes
//## end module%3B8825AC005C.includes

// svm
#include "svm.h"
//## begin module%3B8825AC005C.declarations preserve=no
//## end module%3B8825AC005C.declarations

//## begin module%3B8825AC005C.additionalDeclarations preserve=yes
//## end module%3B8825AC005C.additionalDeclarations


// Class SVM 

//## begin SVM::HARD_LIMIT%3B8A3AF40238.attr preserve=no  public: static SHORT {V} 1
// Error-accounting mode flag: with HARD_LIMIT each misclassified pattern
// contributes 1 to the accumulated error (see TestSVM::evalAllOutputs).
SHORT SVM::HARD_LIMIT = 1;
//## end SVM::HARD_LIMIT%3B8A3AF40238.attr

//## begin SVM::SOFT_LIMIT%3B8A3B02009D.attr preserve=no  public: static SHORT {V} 0
// With SOFT_LIMIT the squared difference (output - target) is accumulated
// for each misclassified pattern instead of a flat 1.
SHORT SVM::SOFT_LIMIT = 0;
//## end SVM::SOFT_LIMIT%3B8A3B02009D.attr


//## begin SVM::SVMBR_VERSION%3B8B95D20234.attr preserve=no  public: static CHAR * {UA} "2.0.1"
// Version string written into saved SVM files and checked on load
// (TrainSVM::saveResult / TestSVM::loadSVM).
const CHAR *SVM::SVMBR_VERSION = "2.0.1";
//## end SVM::SVMBR_VERSION%3B8B95D20234.attr













//## Other Operations (implementation)
REAL SVM::calcNorm ()
{
  //## begin SVM::calcNorm%998688357.body preserve=yes

  // Computes |w|^2 = sum_i sum_j alpha_i*alpha_j*y_i*y_j*K(i,j) over the
  // support vectors (alpha > epsilon).  The result is cached in the member
  // 'norm' and also returned.
  INT i, j;
  INT  ne  = data->getnumExamples();   // example count is integral (was stored in a REAL)
  REAL eps = solver->getepsilon();

  norm = 0.0;
  for(i = 0; i < ne ; i++){
    REAL ai = solver->getalpha(i);     // hoisted out of the inner loop
    if(ai > eps){
      REAL ti = data->gettarget(i);    // hoisted: invariant over j
      for(j = 0 ; j < ne ; j++){
        REAL aj = solver->getalpha(j);
        if(aj > eps)
          norm += ai*aj*ti*data->gettarget(j)*kernel->evalKernel(i,j);
      }
    }
  }

  return norm;

  //## end SVM::calcNorm%998688357.body
}

VOID SVM::setsvmFile (CHAR *svmf)
{
  //## begin SVM::setsvmFile%998880768.body preserve=yes
	// Copies the svm file name into the fixed-size member buffer.
	// strncpy does not null-terminate when the source is as long as the
	// limit, so terminate explicitly to guarantee a valid C string.
	strncpy(svmFile,svmf,_MAX_PATH-1);
	svmFile[_MAX_PATH-1] = '\0';
  //## end SVM::setsvmFile%998880768.body
}

// Returns the (non-owning) pointer to the data set held by this SVM.
Data * SVM::getData ()
{
  //## begin SVM::getData%1000755795.body preserve=yes
	return data;
  //## end SVM::getData%1000755795.body
}

// Additional Declarations
  //## begin SVM%3B87FDC30029.declarations preserve=yes
  //## end SVM%3B87FDC30029.declarations

// Class TrainSVM 

// Destructor: intentionally empty — TrainSVM releases no resources here
// (ownership of data/kernel/solver is not visible in this translation unit).
TrainSVM::~TrainSVM()
{
  //## begin TrainSVM::~TrainSVM%.body preserve=yes
  //## end TrainSVM::~TrainSVM%.body
}



//## Other Operations (implementation)
INT TrainSVM::run ()
{
  //## begin TrainSVM::run%998688382.body preserve=yes

	// Train with the chunking variant when the solver is configured for it,
	// otherwise run the plain solver; then persist the trained model.
	if(!solver->UsingChunk())
		solver->run();
	else
		solver->run_chunck();

	saveResult();

	return TRUE;

  //## end TrainSVM::run%998688382.body
}

VOID TrainSVM::printSummary ()
{
  //## begin TrainSVM::printSummary%998688383.body preserve=yes

  // Prints a human-readable report of the training run to stdout: data
  // representation, kernel and its parameters, solver statistics, support
  // vector count and the margin of separation derived from |w|^2.
  CHAR tmp[40];
  cout << endl << "# Training result:" << endl;
  cout <<         "# ================" << endl;
  cout << "# Data representation: ";
  if(data->getdataType() == Data::NORMAL)      cout << "Normal" << endl;
  else if(data->getdataType() == Data::SPARSE) cout << "Sparse" << endl;
  else if(data->getdataType() == Data::BINARY) cout << "Binary" << endl;
  cout << "# Kernel used: ";
  if(kernel->getkernelType() == Kernel::RBF)
    cout << "RBF [p1=" << kernel->getp1() << ",p2=" << kernel->getp2()  << ",p3="  << kernel->getp3() << "]" << endl;
  else if(kernel->getkernelType() == Kernel::POLY)
    cout << "Poly [p1=" << kernel->getp1() << ",p2=" << kernel->getp2()  << ",p3="  << kernel->getp3() << "]" << endl;
  else if(kernel->getkernelType() == Kernel::SIGMOID)
    cout << "Perceptron [p1=" << kernel->getp1() << ",p2=" << kernel->getp2() << ",p3="  << kernel->getp3() << "]" << endl;
  else if(kernel->getkernelType() == Kernel::LINEAR)
    cout << "Linear [p1=" << kernel->getp1() << ",p2=" << kernel->getp2() << ",p3="  << kernel->getp3() << "]" << endl;
  cout << "# C: " << solver->getC() << endl;
  cout << "# Bias: " << solver->getbias() << endl;
  cout << "# Training time (CPU seconds): ";
  // snprintf instead of sprintf: bounded, cannot overflow tmp
  snprintf(tmp,sizeof(tmp),"%3.2f",solver->getElapsedTime());
  cout << tmp << endl;
  cout << "# Iterations: " << solver->getnumIter() << endl;
  INT k = 0;
  INT dataSize = data->getnumExamples();
  for(INT i = 0; i < dataSize ; i++)
    if(solver->getalpha(i) > solver->getepsilon())
      k++;   // counts number of support vectors
  snprintf(tmp,sizeof(tmp),"%2.2f%%",100.0*k/dataSize);
  cout << "# Support vectors: " << k << " (" << tmp << ")" << endl;

  REAL w2 = getnorm();   // fetch once; used three times below
  cout << "# |w|^2: " << w2 << endl;

  if(ABS(w2) > 0){
    // Geometric margin of a maximum-margin hyperplane: 2/|w|
    REAL margin = 2.0/sqrt(w2);
    cout << "# Margin of separation: " << margin << endl;
  }
  else
    cout << "# Margin of separation = infinite " << endl;

  cout << endl << flush;

  //## end TrainSVM::printSummary%998688383.body
}

INT TrainSVM::saveResult ()
{
  //## begin TrainSVM::saveResult%998880794.body preserve=yes

  // Writes the trained model to svmFile in the SVMBR text format that
  // TestSVM::loadSVM() reads back: one "#"-prefixed comment line followed
  // by one value line per field, then the support vectors, their Lagrange
  // multipliers and their indexes.
  INT k, i, j, n;
  ofstream file(svmFile);
  CHAR tmp[40];

  if(!file.is_open()){
    cerr << "Error opening " << svmFile << endl;
    exit(1);
  }

  INT numExamples = data->getnumExamples();
  REAL epsilon = solver->getepsilon();
  REAL C = solver->getC();
  INT dim = data->getdim();

  // Count support vectors (alpha > epsilon) and, among them, the
  // non-bound ones (alpha < C - epsilon).
  for(i = 0, k = 0, n = 0; i < numExamples ; i++)
    if(solver->getalpha(i) > epsilon){
      k++;   // counts number of support vectors
      if((solver->getalpha(i))<(C-epsilon))
        n++; // counts number of non-bound vectors
    }

  // Lines with "#" as first character are comments
  file << "###### SVMBR file version:" << endl;
  file << SVM::SVMBR_VERSION << endl;

  file << "###### Training time (CPU seconds): " << endl;
  // snprintf instead of sprintf: bounded, cannot overflow tmp
  snprintf(tmp,sizeof(tmp),"%3.2f",solver->getElapsedTime());
  file << tmp << endl;

  file << "###### Number of iterations:" << endl;
  file << solver->getnumIter() << endl;

  file << "###### Data representation:" << endl;
  file << data->getdataType() << endl;

  file << "###### Kernel used:" << endl;
  file << kernel->getkernelType() << endl;

  file << "###### Upper limit for Lagrange multipliers:" << endl;
  file << solver->getC() << endl;

  file << "###### First kernel parameter:" << endl;
  file << kernel->getp1() << endl;

  file << "###### Second kernel parameter:" << endl;
  file << kernel->getp2() << endl;

  file << "###### Third kernel parameter:" << endl;
  file << kernel->getp3() << endl;

  file << "###### Tolerance:" << endl;
  file << solver->gettol() << endl;

  file << "###### Epsilon:" << endl;
  file << solver->getepsilon() << endl;

  file << "###### Bias:" << endl;
  file << solver->getbias() << endl;

  file << "###### |w|^2:" << endl;
  file << calcNorm() << endl;

  file << "###### Number of support vectors (bound + non bound):" << endl;
  file << k << endl;

  file << "###### Number of non bound support vectors:" << endl;
  file << n << endl;

  file << "###### Support vectors + target - one per line:" << endl;

  for(i = 0; i < numExamples ; i++){
    REAL * point = data->getpoint(i);
    if(solver->getalpha(i) > epsilon){
      for(j = 0 ; j < dim ; j++)
        file << point[j] << " ";
      file << data->gettarget(i) << endl;
    }
  }

  file << "###### Lagrange multipliers - one per line:" << endl;
  for(i = 0; i < numExamples ; i++)
    if(solver->getalpha(i) > epsilon)
      file << solver->getalpha(i) << endl;

  file << "###### Support vector indexes (First index is 0):" << endl;
  for(i = 0; i < numExamples ; i++)
    if(solver->getalpha(i) > epsilon)
      file << i << endl;

  file.close();

  return TRUE;

  //## end TrainSVM::saveResult%998880794.body
}

// Stub: output evaluation is not implemented for TrainSVM — it reports the
// call on cerr and returns 0.  See TestSVM::evalOutput for the real version.
REAL TrainSVM::evalOutput (UINT i1)
{
  //## begin TrainSVM::evalOutput%999139422.body preserve=yes
	cerr << "TrainSVM::evalOutput not implemented";
	return 0;
  //## end TrainSVM::evalOutput%999139422.body
}

// Additional Declarations
  //## begin TrainSVM%3B87FDD10246.declarations preserve=yes
  //## end TrainSVM%3B87FDD10246.declarations

// Class TestSVM 







TestSVM::TestSVM (CHAR *svmf, CHAR *trainf, CHAR *outf, SHORT limi)
  //## begin TestSVM::TestSVM%999139404.hasinit preserve=no
  //## end TestSVM::TestSVM%999139404.hasinit
  //## begin TestSVM::TestSVM%999139404.initialization preserve=yes
  //## end TestSVM::TestSVM%999139404.initialization
{
  //## begin TestSVM::TestSVM%999139404.body preserve=yes

	// Stores the svm/model, training-pattern and output file names plus the
	// limit mode (SVM::HARD_LIMIT or SVM::SOFT_LIMIT), then loads the model.
	// strncpy does not null-terminate when the source fills the buffer, so
	// each destination is terminated explicitly.
	strncpy(svmFile,svmf,_MAX_PATH-1);
	svmFile[_MAX_PATH-1] = '\0';
	strncpy(trainPattFile,trainf,_MAX_PATH-1);
	trainPattFile[_MAX_PATH-1] = '\0';
	strncpy(outFile,outf,_MAX_PATH-1);
	outFile[_MAX_PATH-1] = '\0';
	limits = limi;
	loadSVM();

  //## end TestSVM::TestSVM%999139404.body
}


TestSVM::~TestSVM()
{
  //## begin TestSVM::~TestSVM%.body preserve=yes

	// Releases the arrays allocated in loadSVM().  All of them were created
	// with new[], so they must be released with delete[] — the previous
	// plain 'delete' on these pointers was undefined behaviour.
	for(INT i = 0 ; i < numSV ; i++)
		delete [] supVec[i];
	delete [] supVec;
	delete [] supVecTarget;
	delete [] alpha;        // allocated in loadSVM(); was leaked before
	delete [] evalTarget;

  //## end TestSVM::~TestSVM%.body
}



//## Other Operations (implementation)
INT TestSVM::run ()
{
  //## begin TestSVM::run%998688384.body preserve=yes

  // Seed the random generator, time the evaluation of every test pattern,
  // and write the results to disk when an output file name was given.
  SRAND(time(NULL));

  crono.startCronometer();
  evalAllOutputs();
  crono.stopCronometer();

  if(strlen(outFile) > 0){
    saveResult();
  }

  return TRUE;

  //## end TestSVM::run%998688384.body
}

VOID TestSVM::printSummary ()
{
  //## begin TestSVM::printSummary%998688385.body preserve=yes

  // Prints a human-readable report of the test run to stdout: data
  // representation, kernel parameters, bias, timing, classification rate
  // and the accumulated error.
  CHAR tmp[40];
  cout << endl << "# Testing result:" << endl;
  cout <<         "# ===============" << endl;
  cout << "# Data representation: ";
  if(data->getdataType() == Data::NORMAL)      cout << "Normal" << endl;
  else if(data->getdataType() == Data::SPARSE) cout << "Sparse" << endl;
  else if(data->getdataType() == Data::BINARY) cout << "Binary" << endl;
  cout << "# Kernel used: ";
  if(kernel->getkernelType() == Kernel::RBF)
    cout << "RBF [p1=" << kernel->getp1() << ",p2=" << kernel->getp2()  << ",p3="  << kernel->getp3() << "]" << endl;
  else if(kernel->getkernelType() == Kernel::POLY)
    cout << "Poly [p1=" << kernel->getp1() << ",p2=" << kernel->getp2()  << ",p3="  << kernel->getp3() << "]" << endl;
  else if(kernel->getkernelType() == Kernel::SIGMOID)
    cout << "Perceptron [p1=" << kernel->getp1() << ",p2=" << kernel->getp2() << ",p3="  << kernel->getp3() << "]" << endl;
  else if(kernel->getkernelType() == Kernel::LINEAR)
    cout << "Linear [p1=" << kernel->getp1() << ",p2=" << kernel->getp2() << ",p3="  << kernel->getp3() << "]" << endl;
  cout << "# Bias: " << bias << endl;
  cout << "# Testing time (CPU seconds): ";
  // snprintf instead of sprintf: bounded, cannot overflow tmp
  snprintf(tmp,sizeof(tmp),"%3.2f",crono.getElapsedTime());
  cout << tmp << endl;
  cout << "# Number of input testing patterns: " << data->getnumExamples() << endl;
  snprintf(tmp,sizeof(tmp),"%02.4f%%",100.0*numCorrect/data->getnumExamples());
  cout << "# Number of well classified patterns: "  << numCorrect << " (" << tmp << ")" << endl;
  cout << "# Mean square error: " << mse << endl << flush;

  //## end TestSVM::printSummary%998688385.body
}

INT TestSVM::saveResult ()
{
  //## begin TestSVM::saveResult%998880795.body preserve=yes

  // Writes the test results to outFile in the SVMBR text format:
  // "#"-prefixed comment lines followed by value lines, then one computed
  // output per input pattern.  Returns FALSE if the file cannot be opened.
  INT i;
  ofstream file(outFile);
  CHAR tmp[40];

  if(!file.is_open()){
    cerr << "(TestSVM::saveResult) Error when opening " << outFile << endl;
    return FALSE;
  }

  cout << "Saving outputs..." << endl << flush;

  // Lines with "#" as first character are comments
  file << "###### SVMBR file version:" << endl;
  file << SVM::SVMBR_VERSION << endl;

  file << "###### Testing time (CPU seconds): " << endl;
  // snprintf instead of sprintf: bounded, cannot overflow tmp
  snprintf(tmp,sizeof(tmp),"%3.2f",crono.getElapsedTime());
  file << tmp << endl;

  file << "###### Kernel used:" << endl;
  file << kernel->getkernelType() << endl;

  file << "###### Bias:" << endl;
  file << bias << endl;

  file << "##### Number of input testing patterns: " << endl;
  file << data->getnumExamples() << endl;

  snprintf(tmp,sizeof(tmp),"%02.4f%%",100.0*numCorrect/data->getnumExamples());
  file << "##### Number of well classified patterns: (" << tmp << ")" << endl;
  file << numCorrect << endl;

  file << "##### Mean square error:" << endl;
  file << mse << endl;

  file << "##### Outputs:" << endl;

  INT dataSize = data->getnumExamples();
  for(i = 0 ; i < dataSize ; i++)
    file << evalTarget[i] << endl;

  file.close();
  return TRUE;

  //## end TestSVM::saveResult%998880795.body
}

// Convenience overload: loads the SVM from the file name stored in svmFile.
INT TestSVM::loadSVM ()
{
  //## begin TestSVM::loadSVM%999139402.body preserve=yes
	return loadSVM(svmFile);
  //## end TestSVM::loadSVM%999139402.body
}

INT TestSVM::loadSVM (CHAR *filen)
{
  //## begin TestSVM::loadSVM%999139403.body preserve=yes
  #define MAX_LEN 120

  // Loads a trained SVM from 'filen' (SVMBR text format written by
  // TrainSVM::saveResult): kernel configuration, bias, support vectors and
  // Lagrange multipliers.  Also loads the pattern file and allocates the
  // output buffer.  The file format alternates one comment line with one
  // value line, hence the paired getline() calls below (the second getline
  // after a ">>" consumes the trailing newline).
  // Returns FALSE on version mismatch; exits on I/O or allocation failure.
  INT i, j, kt;
  REAL p1, p2, p3;
  CHAR str[MAX_LEN];

  // BUG FIX: previously opened the member svmFile, silently ignoring the
  // 'filen' parameter.  Harmless for the current caller (which passes
  // svmFile) but wrong for any other file name.
  ifstream file(filen);

  if (!file.is_open()) {
    cerr << "(TestSVM::loadSVM) Error when opening " << filen << endl;
    exit(1);
  }

  cout << "Loading svm file" << endl << flush;

  // file version: checked against the program version
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  if(strncmp(str,SVM::SVMBR_VERSION,strlen(SVM::SVMBR_VERSION)) != 0){
      cout  << "Wrong file version. Version must be equal to "
            << SVM::SVMBR_VERSION << endl;
      return FALSE;
  }

  // training time: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  // iterations: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  // data type
  file.getline(str,MAX_LEN,'\n');
  file >> i;
  file.getline(str,MAX_LEN,'\n'); // remove return

  // instantiate the matching Data reader for the pattern file
  if(i == Data::NORMAL)
    data = new NormalData(trainPattFile);
  else if(i == Data::SPARSE)
    data = new SparseData(trainPattFile);
  else if(i == Data::BINARY)
    data = new BinaryData(trainPattFile);

  dim = data->getdim();

  // kernel used
  file.getline(str,MAX_LEN,'\n');
  file >> kt;
  file.getline(str,MAX_LEN,'\n');

  // upper limit for Lagrange multipliers: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  // kernel parameter 1
  file.getline(str,MAX_LEN,'\n');
  file >> p1;
  file.getline(str,MAX_LEN,'\n');

  // kernel parameter 2
  file.getline(str,MAX_LEN,'\n');
  file >> p2;
  file.getline(str,MAX_LEN,'\n');

  // kernel parameter 3
  file.getline(str,MAX_LEN,'\n');
  file >> p3;
  file.getline(str,MAX_LEN,'\n');

  // instantiate the matching kernel
  if(kt == Kernel::RBF)
    kernel = new RBFKernel(p1,p2,p3,data);
  else if(kt == Kernel::POLY)
    kernel = new PolyKernel(p1,p2,p3,data);
  else if(kt == Kernel::LINEAR)
    kernel = new LinearKernel(p1,p2,p3,data);
  else if(kt == Kernel::SIGMOID)
    kernel = new SigmoidKernel(p1,p2,p3,data);

  // tolerance: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  // epsilon: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  // bias
  file.getline(str,MAX_LEN,'\n');
  file >> bias;
  file.getline(str,MAX_LEN,'\n');

  // norm: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  // number of support vectors
  file.getline(str,MAX_LEN,'\n');
  file >> numSV;
  file.getline(str,MAX_LEN,'\n');

  // number of unbounded SVs: not used
  file.getline(str,MAX_LEN,'\n');
  file.getline(str,MAX_LEN,'\n');

  try {
    supVec = new REAL *[numSV];
    for( i = 0 ; i < numSV ; i++)
      supVec[i] = new REAL [dim];
    supVecTarget = new REAL [numSV];
  }
  catch(...){
    cerr << "Could not allocate memory (TestSVM)" << endl;
    exit(1);
  }

  // support vectors: dim coordinates followed by the target, one SV per line
  file.getline(str,MAX_LEN,'\n');

  for(i = 0 ; i < numSV ; i++){
    for (j = 0; j < dim; j++)
      file >> supVec[i][j];
    file >> supVecTarget[i];
    file.getline(str,MAX_LEN,'\n'); // remove return
  }

  // Lagrange multipliers, one per line
  try {
    alpha = new REAL [numSV];
  }
  catch(...){
    cerr << "Could not allocate memory (TestSVM)" << endl;
    exit(1);
  }

  file.getline(str,MAX_LEN,'\n');
  for(i = 0 ; i < numSV ; i++){
    file >> alpha[i];
    file.getline(str,MAX_LEN,'\n');
  }

  // output buffer, one slot per test pattern (filled by evalOutput)
  try{
    evalTarget = new REAL [data->getnumExamples()];
  }
  catch(...){
    cerr << "Could not allocate memory (TestSVM::TestSVM)";
    exit(1);
  }

  file.close();
  return TRUE;

  //## end TestSVM::loadSVM%999139403.body
}

VOID TestSVM::evalAllOutputs ()
{
  //## begin TestSVM::evalAllOutputs%999139406.body preserve=yes

  // Evaluates the SVM on every test pattern, counting the correctly
  // classified ones and accumulating the error of the misclassified ones
  // (flat 1 for HARD_LIMIT, squared deviation otherwise).
  cout << "Starting svm evaluating." << endl << flush;
  numCorrect = 0;
  mse = 0;   // BUG FIX: mse was accumulated without ever being reset here

  INT dataSize = data->getnumExamples();
  for(INT i = 0 ; i < dataSize ; i++){
    evalOutput(i);   // stores the raw output in evalTarget[i]
    REAL target = data->gettarget(i);
    if(SIGN(evalTarget[i]) == target)
      numCorrect++;
    else {
      if(limits == SVM::HARD_LIMIT)
        mse += 1;
      else {
        REAL diff = evalTarget[i] - target;
        mse += diff*diff;
      }
    }
  }

  // Average the error over the misclassified examples only
  // (original behaviour, kept as-is).
  if(dataSize - numCorrect)
    mse /= (dataSize - numCorrect);
  else {
    cout << "All examples were correctly classified" << endl;
    mse = 0;
  }

  //## end TestSVM::evalAllOutputs%999139406.body
}

REAL TestSVM::evalOutput (UINT i1)
{
  //## begin TestSVM::evalOutput%999139408.body preserve=yes

  // SVM decision function for test pattern i1:
  //   f(x) = sum_sv y_sv * alpha_sv * K(sv, x) - bias
  // The raw (unthresholded) output is cached in evalTarget[i1].
  REAL output = 0;

  for(INT sv = 0; sv < numSV ; sv++)
    output += supVecTarget[sv] * alpha[sv] * kernel->evalKernel(supVec[sv], i1);

  output -= bias;
  evalTarget[i1] = output;

  return output;

  //## end TestSVM::evalOutput%999139408.body
}

// Additional Declarations
  //## begin TestSVM%3B87FDD9007B.declarations preserve=yes
  //## end TestSVM%3B87FDD9007B.declarations

//## begin module%3B8825AC005C.epilog preserve=yes
//## end module%3B8825AC005C.epilog
