//## begin module%3B890A340188.cm preserve=no
//	  %X% %Q% %Z% %W%
//## end module%3B890A340188.cm

//## begin module%3B890A340188.cp preserve=no
//	+----------------------------------------------+
//	| SVMBR - A program for training SVMs          |
//	+----------------------------------------------+
//	| * Creator, modeling, coding:                 |
//	|    Marcelo Barros de Almeida                 |
//	|    barros@smar.com.br                        |
//	|    http://litc.cpdee.ufmg.br/~barros/        |
//	| * Coding, improvements, bug fixes:           |
//	|    Bernardo Penna                            |
//	|    bprc@brfree.com.br                        |
//	+----------------------------------------------+
//
//	 Copyright(c) 2002 by Marcelo Barros de Almeida
//	                            All rights reserved
//
//	-[CVS]------------------------------------------
//	$Author$
//	$Date$
//	$Source$
//	$Name$
//	$Revision$
//	------------------------------------------------
//## end module%3B890A340188.cp

//## Module: lineargs%3B890A340188; Package body
//## Subsystem: <Top Level>
//## Source file: D:\textotese\svmbr\src\lineargs.cpp

//## begin module%3B890A340188.additionalIncludes preserve=no
//## end module%3B890A340188.additionalIncludes

//## begin module%3B890A340188.includes preserve=yes
//## end module%3B890A340188.includes

// lineargs
#include "lineargs.h"
//## begin module%3B890A340188.declarations preserve=no
//## end module%3B890A340188.declarations

//## begin module%3B890A340188.additionalDeclarations preserve=yes
//## end module%3B890A340188.additionalDeclarations


// Class LineArgs 























LineArgs::LineArgs (INT argn, CHAR **argv)
  //## begin LineArgs::LineArgs%998880759.hasinit preserve=no
  //## end LineArgs::LineArgs%998880759.hasinit
  //## begin LineArgs::LineArgs%998880759.initialization preserve=yes
  //## end LineArgs::LineArgs%998880759.initialization
{
  //## begin LineArgs::LineArgs%998880759.body preserve=yes

	// Parse the arguments up front; if parsing reports failure
	// (help/version printed or bad input) stop the program quietly,
	// otherwise echo the effective configuration to the user.
	if (!checkLineArgs(argn, argv))
		exit(0);

	printSummary();

  //## end LineArgs::LineArgs%998880759.body
}


LineArgs::~LineArgs()
{
  //## begin LineArgs::~LineArgs%.body preserve=yes
	// Release the per-class parameter structures: one slot for binary
	// problems (numClasses <= 2), otherwise one slot per class — the
	// same sizing rule used when the array was allocated.
	for (INT i = 0; i < (numClasses > 2 ? numClasses : 1); i++)
		delete params[i];
	// params was allocated with the array form (new class_arguments *[n]),
	// so it must be released with delete [] — scalar delete here is
	// undefined behavior.
	delete [] params;
  //## end LineArgs::~LineArgs%.body
}



//## Other Operations (implementation)
INT LineArgs::checkLineArgs (INT argn, CHAR **argv)
{
  //## begin LineArgs::checkLineArgs%998880760.body preserve=yes

	// Thin wrapper: all parsing and validation work is delegated to
	// loadParams(); its TRUE/FALSE result is forwarded unchanged.
	INT parsedOk = loadParams(argn, argv);
	return parsedOk;

  //## end LineArgs::checkLineArgs%998880760.body
}

INT LineArgs::loadParams (INT argn, CHAR **argv)
{
  //## begin LineArgs::loadParams%998880764.body preserve=yes

	// Load the run configuration either from a parameters file (when the
	// first argument does not start with '-') or from "-option value"
	// pairs on the command line. Returns TRUE when the program should
	// proceed, FALSE when it should stop (no arguments, -h or -version).

	INT i;

	// running without arguments ? print help.
	if(argn == 1){
		printHelp(argv[0]);
		return FALSE;
	}

	// Using a file with all parameters or command line ?
	if(strncmp(argv[1],"-",1)!=0){
		strncpy(argsFile,argv[1],_MAX_PATH-1);
    return readFile(argsFile);
	}

	// allocates only one parameter structure (binary problem)
	try {
	  params = new class_arguments *[1];
	  params[0] = new class_arguments;
	}
	catch(...){
	  cerr << "Could not allocate memory (LineArgs::loadParams)" << endl;
	  exit(1);
	}

	numClasses = 2;
	setDefaultValues(0);

	// command line
  for(i = 1 ; i < argn; i++){
    if(strcmp(argv[i],"-k")==0){
      if(strcmp(argv[++i],"rbf")==0)
				params[0]->kernelType = Kernel::RBF;
      else if(strcmp(argv[i],"poly")==0)
				params[0]->kernelType = Kernel::POLY;		
      else if(strcmp(argv[i],"perceptron")==0)
				params[0]->kernelType = Kernel::SIGMOID;		
      else if(strcmp(argv[i],"linear")==0)
				params[0]->kernelType = Kernel::LINEAR;
      else {
				cerr << "Kernel " << argv[i] << " is unknown." << endl <<
							"Type " << argv[0] << " -h for a list with the "
							"implemented kernels." <<  endl;
							exit(1);
      }
    }
    else if(strcmp(argv[i],"-c")==0){
      params[0]->C = (REAL) atof(argv[++i]);
    }
    else if(strcmp(argv[i],"-p1")==0){
      params[0]->p1 = (REAL) atof(argv[++i]);
    }
    else if(strcmp(argv[i],"-p2")==0){
      params[0]->p2 = (REAL) atof(argv[++i]);
    }
    else if(strcmp(argv[i],"-p3")==0){
      params[0]->p3 = (REAL) atof(argv[++i]);
    }		
    else if(strcmp(argv[i],"-t")==0){
      params[0]->tol = (REAL) atof(argv[++i]);
    }
    else if(strcmp(argv[i],"-e")==0){
      params[0]->epsilon = (REAL) atof(argv[++i]);
    }
    else if(strcmp(argv[i],"-l")==0){
      params[0]->limit = (SHORT) atoi(argv[++i]);
    }
    else if(strcmp(argv[i],"-edrpower")==0){
      params[0]->EDRPower = (REAL) atof(argv[++i]);
    }
    else if(strcmp(argv[i],"-edr")==0){
      params[0]->numEDR = (INT) atoi(argv[++i]);
	  solverMethod  = Solver::EDR;
    }
	else if(strcmp(argv[i],"-maxiter")==0){
      params[0]->maxIter = (INT) atoi(argv[++i]);
    //solverMethod should be ???
	}
	else if(strcmp(argv[i],"-solver")==0){
	  // BUG FIX: consume the option value exactly once. The previous
	  // code applied ++i inside every comparison, so "-solver smo" was
	  // compared against "edr", then i advanced again and unrelated
	  // arguments were compared against "smo"/"boosting".
	  ++i;
	  if(strcmp(argv[i],"edr")==0)
	    solverMethod = Solver::EDR;
	  else if(strcmp(argv[i],"smo")==0)
	    solverMethod = Solver::SMO;
	  else if(strcmp(argv[i],"boosting")==0)
	    solverMethod = Solver::BOOSTING;
	}
    else if(strcmp(argv[i],"-chunk")==0){
      chunkSize = (INT) atoi(argv[++i]);
	}
	else if(strcmp(argv[i],"-d")==0){
      if(strcmp(argv[++i],"normal")==0)
				dataType = Data::NORMAL;
      else if(strcmp(argv[i],"sparse")==0)
				dataType = Data::SPARSE;
      else if(strcmp(argv[i],"binary")==0)
				dataType = Data::BINARY;
    }
    else if(strcmp(argv[i],"-tp")==0){
      strncpy(trainPattFile,argv[++i],_MAX_PATH-1);
    }
    else if(strcmp(argv[i],"-gp")==0){
      strncpy(testPattFile,argv[++i],_MAX_PATH-1);
    }
    else if(strcmp(argv[i],"-sv")==0){
      strncpy(params[0]->svmFile,argv[++i],_MAX_PATH-1);
    }
//  else if(strcmp(argv[i],"-to")==0){
//    strncpy(trainOutFile,argv[++i],_MAX_PATH-1);
//  }
    else if(strcmp(argv[i],"-go")==0){
      strncpy(testOutFile,argv[++i],_MAX_PATH-1);
    }
    else if(strcmp(argv[i],"-h")==0){
      printHelp(argv[0]);
      return FALSE;
    }
    else if(strcmp(argv[i],"-version")==0){
      cout << "SVMBR version " << SVM::SVMBR_VERSION << endl << flush;			
      return FALSE;
    }		
  }
   
  return TRUE;

  //## end LineArgs::loadParams%998880764.body
}

VOID LineArgs::setDefaultValues (INT i)
{
  //## begin LineArgs::setDefaultValues%998880762.body preserve=yes

  // Fill parameter slot i with the per-class SVM defaults.
  class_arguments *p = params[i];

  p->kernelType = Kernel::LINEAR;
  p->C          = 1;
  p->p1         = 1;
  p->p2         = 0;
  p->p3         = 1;                // scale factor used
  p->epsilon    = (REAL) 0.001;
  p->tol        = (REAL) 0.001;
  p->numEDR     = -1;               // without EDR
  p->EDRPower   = 2;                // use a quadratic comparison function
  p->maxIter    = -1;               // loop until convergence
  p->limit      = SVM::SOFT_LIMIT;
  p->svmFile[0] = '\0';

  // Run-wide (global) defaults, reset on every call.
  dataType         = Data::NORMAL;
  solverMethod     = Solver::SMO;
  chunkSize        = -1;            // number of vectors; -1 = no chunking
  trainPattFile[0] = '\0';
  testPattFile[0]  = '\0';
//trainOutFile[0]  = '\0';
  testOutFile[0]   = '\0';

  //## end LineArgs::setDefaultValues%998880762.body
}

VOID LineArgs::printHelp (CHAR *prog)
{
  //## begin LineArgs::printHelp%998880763.body preserve=yes

  // Print the two usage forms: (1) "-option value" pairs on the command
  // line and (2) a single parameters file. The option tables below must
  // stay in sync with loadParams() and readFile().
  // FIX: the EDR power option is parsed as "-edrpower" (see loadParams)
  // and its default is 2 (see setDefaultValues); the help previously
  // advertised "-power" with default 1.
  cout << "Usage:(1 of 2)\n\n" << prog << " -option1 value1 -option2 value2 ...\n\n" ;
  cout << "Options (type in lower case !):\n\n";
  cout << 
"KERNELS:                                                                     \n"
"=======                                                                      \n"
"-k [poly|rbf|perceptron|linear]  | Chooses the SVM kernel.                   \n"
"                                 | Kernel expressions:                       \n"
"                                 | rbf       : exp(-|x1-x2|^2/(2*p3*p1^2))   \n"
"                                 | poly      : ((x1*x2+p2)/p3)^p1            \n"
"                                 | perceptron: tanh(p1*x1*x2/p3+p2)          \n"
"                                 | linear    : (x1*x2+p2)/p3                 \n"
"                                 |                                           \n"
"-p1 value                        | Kernel parameters.                        \n"
"-p2 value                        | For RBF, p1 is the variance and p2        \n"
"-p3 value                        | is not used. For polynomial p1 is the     \n"
"                                 | degree, p2 is bias. Perceptron has p1 as  \n"
"                                 | slope and p2 as offset. Linear uses p2 as \n"
"                                 | bias. Scale parameter is provided for     \n"
"                                 | all kernels by p3 parameter.              \n" 
"                                 | Default: p1=1, p2=0, p3=1.                \n"
"                                 |                                           \n"
"FILES:                                                                       \n"
"=====                                                                        \n"
"-tp filename                     | Training patterns.                        \n"
"                                 | First two numbers specifies the amount of \n"
"                                 | training vectors and the input vector,    \n"
"                                 | dimension, respectively. After these two  \n"
"                                 | values, other points are considered as    \n"
"                                 | data. Put one data vector per row,        \n"
"                                 | followed by its target. Numbers must be   \n"
"                                 | separated by spaces. For instance, dataset\n"
"                                 | for XOR problem is stated as (please,     \n"
"                                 | remove comments):                         \n"
"                                 | ----------------------------------------  \n"
"                                 | 4       // number of training vector      \n" 
"                                 | 2       // input dimension                \n"
"                                 | 0 0 -1  // vector 1 + target              \n"
"                                 | 0 1  1  // vector 2 + target              \n"
"                                 | 1 0  1  // vector 3 + target              \n"
"                                 | 1 1 -1  // vector 4 + target              \n"
"                                 | ----------------------------------------  \n"
"                                 |                                           \n"
"-gp filename                     | Testing patterns (generalization).        \n"
"                                 | First two numbers specifies the amount of \n"
"                                 | testing vectors and the input vector,     \n"
"                                 | dimension, respectively. After these two  \n"
"                                 | values, other points are considered as    \n"
"                                 | data. Put one data vector per row,        \n"
"                                 | followed by its target. Numbers must be   \n"
"                                 | separated by spaces. For instance, dataset\n"
"                                 | for XOR problem is stated as (please,     \n"
"                                 | remove comments):                         \n"
"                                 | ----------------------------------------  \n"
"                                 | 4       // number of training vector      \n" 
"                                 | 2       // input dimension                \n"
"                                 | 0 0 -1  // vector 1 + target              \n"
"                                 | 0 1  1  // vector 2 + target              \n"
"                                 | 1 0  1  // vector 3 + target              \n"
"                                 | 1 1 -1  // vector 4 + target              \n"
"                                 | ----------------------------------------  \n"
"                                 |                                           \n"
"-go filename                     | Testing output file.                      \n"
"                                 | Generalization results are saved on this  \n"
"                                 | file, like SVM's output and MSE.          \n"
"                                 |                                           \n"
"-sv filename                     | SVM configuration.                        \n"
"                                 | This file describes the SVM structure. It \n"
"                                 | is the output file from SVMBR.            \n"
"                                 |                                           \n"
"-d [normal|sparse|binary]        | Data representation.                      \n"
"                                 | This parameter affects how the vectors are\n"
"                                 | treated by the program. Set sparse when   \n"
"                                 | using training vector built mainly by     \n"
"                                 | zeros and binary when input vectors have  \n"
"                                 | only 0 and 1 values.                      \n"
"                                 | Default: d=normal                         \n"
"                                 |                                           \n"
"OPTIMIZATION:                                                                \n"
"============                                                                 \n"
"-solver [edr|smo|boosting]       | Method used to find the solution.         \n"
"                                 | Default: solver = smo                     \n"
"                                 |                                           \n" 
"-c value                         | Upper limit for Lagrange multipliers.     \n"
"                                 | Default: c=1                              \n"
"                                 |                                           \n"
"-t value                         | Tolerance for KKT condition.              \n"
"                                 | Default: t=0.001                          \n"
"                                 |                                           \n"
"-e value                         | Epsilon for bound precision.              \n"
"                                 | Default: e=0.001                          \n"
"                                 |                                           \n"
"-edr value                       | Error dependent repetition.               \n"
"                                 | Number of scans performed on the training \n"
"                                 | set.                                      \n"
"                                 | Default: edr = -1 (off)                   \n"
"                                 |                                           \n"	
"-edrpower value                  | Power for comparison function in EDR.     \n"
"                                 | (err_i)^power > j*err_max/edr.            \n"
"                                 | Default: edrpower = 2                     \n"
"                                 |                                           \n"	
"-maxiter value                   | Maximum number of iterations of SMO when  \n"
"                                 | using together with EDR.                  \n"
"                                 | Default: maxiter = -1                     \n"
"                                 |                                           \n"
"-chunk value                     | Chunking size.                            \n"
"                                 | Number of vectors in chunk.               \n"
"                                 | Default: chunk = -1 (not using chunk).    \n"
"                                 |                                           \n"
    "OTHERS:                                                                      \n"
    "======                                                                       \n"    
    "-l [0|1]                         | Hard (1) or soft limit (0) when           \n"
    "                                 | evaluating output.                        \n"
    "                                 | Default: l=0                              \n"
    "                                 |                                           \n"
//  "-v                               | Verbosity level.                          \n"
//  "                                 | Default: v=0                              \n"
//  "                                 |                                           \n"
    "-version                         | Print the current version.                \n"
    "                                 |                                           \n"
    "-h                               | Print this help message.                  \n"
    "                                 |                                           \n"
    << endl << flush;

  cout << "Usage:(2 of 2)\n\n" << prog << " file_with_all_parameters\n\n" ;
  cout << "Options (type in upper case !):\n\n";
  cout << 
"[GLOBAL]                                                                     \n"
"NUMCLASSES=value                 | Number of classes.                        \n"
"                                 | Default: 2                                \n"
"                                 |                                           \n"
"DATAREPRESENTATION=              | Data representation.                      \n"
"[normal|sparse|binary]           | This parameter affects how the vectors are\n"
"                                 | treated by the program. Set sparse when   \n"
"                                 | using training vector built mainly by     \n"
"                                 | zeros and binary when input vectors have  \n"
"                                 | only 0 and 1 values.                      \n"
"                                 | Default: d=normal                         \n"
"                                 |                                           \n"   
"SOLUTIONMETHOD=[edr|smo|boosting]| Method used to find the solution.         \n"
"                                 | Default: smo                              \n"
"                                 |                                           \n"
"CHUNKSIZE=value                  | Chunking size.                            \n"
"                                 | Number of vectors in chunk.               \n"
"                                 | Default: chunk = -1 (not using chunk).    \n"
"                                 |                                           \n"
"TRAININGPATTERNFILE=filename     | Training patterns.                        \n"
"                                 | First two numbers specifies the amount of \n"
"                                 | training vectors and the input vector,    \n"
"                                 | dimension, respectively. After these two  \n"
"                                 | values, other points are considered as    \n"
"                                 | data. Put one data vector per row,        \n"
"                                 | followed by its target. Numbers must be   \n"
"                                 | separated by spaces. For instance, dataset\n"
"                                 | for XOR problem is stated as (please,     \n"
"                                 | remove comments):                         \n"
"                                 | ----------------------------------------  \n"
"                                 | 4       // number of training vector      \n" 
"                                 | 2       // input dimension                \n"
"                                 | 0 0 -1  // vector 1 + target              \n"
"                                 | 0 1  1  // vector 2 + target              \n"
"                                 | 1 0  1  // vector 3 + target              \n"
"                                 | 1 1 -1  // vector 4 + target              \n"
"                                 | ----------------------------------------  \n"
"                                 | For multi class problems, the targets are \n"
"                                 | represented by 0,1,2,3... For instance:   \n"
"                                 | 6       // number of training vector      \n" 
"                                 | 2       // input dimension                \n"
"                                 | 0 7 0   // vector 1 + target              \n"
"                                 | 2 1 0   // vector 2 + target              \n"
"                                 | 4 4 1   // vector 3 + target              \n"
"                                 | 3 1 2   // vector 4 + target              \n" 
"                                 | 2 4 0   // vector 5 + target              \n"
"                                 | 7 6 1   // vector 6 + target              \n"
"                                 |                                           \n"
"TESTINGPATTERNFILE=filename      | Testing patterns (generalization).        \n"
"                                 | First two numbers specifies the amount of \n"
"                                 | testing vectors and the input vector,     \n"
"                                 | dimension, respectively. After these two  \n"
"                                 | values, other points are considered as    \n"
"                                 | data. Put one data vector per row,        \n"
"                                 | followed by its target. Numbers must be   \n"
"                                 | separated by spaces. For instance, dataset\n"
"                                 | for XOR problem is stated as (please,     \n"
"                                 | remove comments):                         \n"
"                                 | ----------------------------------------  \n"
"                                 | 4       // number of training vector      \n" 
"                                 | 2       // input dimension                \n"
"                                 | 0 0 -1  // vector 1 + target              \n"
"                                 | 0 1  1  // vector 2 + target              \n"
"                                 | 1 0  1  // vector 3 + target              \n"
"                                 | 1 1 -1  // vector 4 + target              \n"
"                                 | ----------------------------------------  \n"
"                                 | For multi class problems, the targets are \n"
"                                 | represented by 0,1,2,3... For instance:   \n"
"                                 | 6       // number of training vector      \n" 
"                                 | 2       // input dimension                \n"
"                                 | 0 7 0   // vector 1 + target              \n"
"                                 | 2 1 0   // vector 2 + target              \n"
"                                 | 4 4 1   // vector 3 + target              \n"
"                                 | 3 1 2   // vector 4 + target              \n" 
"                                 | 2 4 0   // vector 5 + target              \n"
"                                 | 7 6 1   // vector 6 + target              \n"
"                                 |                                           \n"	
"TESTINGOUTFILE=filename          | Testing output file.                      \n"
"                                 | Generalization results are saved on this  \n"
"                                 | file, like SVM's output and MSE.          \n"
"                                 |                                           \n"
"                                                                             \n"
"[GLOBAL] or [CLASS:x]                                                        \n"
"(x is the number of the class: 0,1,2,3...)                                   \n"
"                                                                             \n"  
"SVMFILE=filename                 | SVM configuration.                        \n"
"                                 | This file describes the SVM structure. It \n"
"                                 | is the output file from SVMBR             \n"
"                                 | Only for 2 classes it is global, if not   \n"
"                                 | it is not global, it has to exist in each \n"
"                                 | class parameters.                         \n"                         
"                                                                             \n"
"[CLASS:x]                                                                    \n"
"(x is the number of the class: 0,1,2,3...)                                   \n"
"                                                                             \n"  
"KERNELTYPE=                      | Chooses the SVM kernel.                   \n"
"[poly|rbf|perceptron|linear]     | Kernel expressions:                       \n"
"                                 | rbf       : exp(-|x1-x2|^2/(2*p3*p1^2))   \n"
"                                 | poly      : ((x1*x2+p2)/p3)^p1            \n"
"                                 | perceptron: tanh(p1*x1*x2/p3+p2)          \n"
"                                 | linear    : (x1*x2+p2)/p3                 \n"
"                                 |                                           \n"
"P1=value                         | Kernel parameters.                        \n"
"P2=value                         | For RBF, p1 is the variance and p2        \n"
"P3=value                         | is not used. For polynomial p1 is the     \n"
"                                 | degree, p2 is bias. Perceptron has p1 as  \n"
"                                 | slope and p2 as offset. Linear uses p2 as \n"
"                                 | bias. Scale parameter is provided for     \n"
"                                 | all kernels by p3 parameter.              \n" 
"                                 | Default: P1=1, P2=0, P3=1.                \n"
"                                 |                                           \n"
"                                 |                                           \n"
"C=value                          | Upper limit for Lagrange multipliers.     \n"
"                                 | Default: 1                                \n"
"                                 |                                           \n"
"TOL=value                        | Tolerance for KKT condition.              \n"
"                                 | Default: 0.001                            \n"
"                                 |                                           \n"
"EPS=value                        | Epsilon for bound precision.              \n"
"                                 | Default: 0.001                            \n"
"                                 |                                           \n"
"NUMEDR=value                     | Error dependent repetition.               \n"
"                                 | Number of scans performed on the training \n"
"                                 | set.                                      \n"
"                                 | Default: -1 (off)                         \n"
"                                 |                                           \n"
"EDRPOWER=value                   | Power for comparison function in EDR.     \n"
"                                 | (err_i)^power > j*err_max/edr.            \n"
"                                 | Default: power = 2                        \n"
"                                 |                                           \n"	
"MAXITER=value                    | Maximum number of iterations of SMO when  \n"
"                                 | using together with EDR.                  \n"
"                                 | Default: -1                               \n"
"                                 |                                           \n"
"LIMIT=[0|1]                      | Hard (1) or soft limit (0) when           \n"
"                                 | evaluating output.                        \n"
"                                 | Default: l=0                              \n"
"                                 |                                           \n"
" It is important to observe these rules when using the parameters file       \n"
" - file cannot have spaces before any word                                   \n"
" - file cannot have spaces after '='                                         \n"
" - file must have return in the last line                                    \n"
" - file can have lines for comment  using # or ; at beginning                \n"
"                                                                             \n"   
	<< endl << flush;

  //## end LineArgs::printHelp%998880763.body
}

VOID LineArgs::printSummary ()
{
  //## begin LineArgs::printSummary%998880765.body preserve=yes

	// Echo the effective configuration: the global settings first, then
	// the per-class parameter slots (one slot for binary problems,
	// numClasses slots otherwise — mirroring the allocation rule).

	cout << "Number of classes......: ";
	cout << numClasses << endl;

	cout << "Data representation....: ";
	if(dataType == Data::NORMAL)      cout << "Normal";
	else if(dataType == Data::SPARSE)	cout << "Sparse";
	else if(dataType == Data::BINARY) cout << "Binary";
	cout << endl;

	cout << "Solution method........: ";
	if(solverMethod == Solver::SMO)      cout << "SMO";
	else if(solverMethod == Solver::EDR)	cout << "EDR";
	else if(solverMethod == Solver::BOOSTING) cout << "BOOSTING";
	cout << endl;

	if(chunkSize > 0)
		cout << "Chunking size..........: " << chunkSize << endl;
	else
		cout << "Not using chunking.....:" << endl;

	// File names are only reported when actually set.
	if(strlen(trainPattFile))
		cout << "Training patterns......: " << trainPattFile << endl;
	if(strlen(testPattFile))
		cout << "Testing patterns.......: " << testPattFile << endl;
	if(strlen(testOutFile))
		// FIX: corrected output typo "Generalizarion".
		cout << "Generalization results.: " << testOutFile << endl;
	
	for (INT i = 0; i < (numClasses > 2 ? numClasses : 1); i++){
	
	  if (numClasses > 2)
			cout << endl << "CLASS: " << i << endl;

	  if(strlen(params[i]->svmFile))
		cout << "SVM file...............: " << params[i]->svmFile << endl;

	  cout << "Kernel.................: ";
	  if(params[i]->kernelType == Kernel::RBF)
			cout  << "RBF [p1=" << params[i]->p1 << ",p2=" << params[i]->p2  
						<< ",p3="  << params[i]->p3 << "]" << endl;
		else if(params[i]->kernelType == Kernel::POLY)
			cout	<< "Poly [p1=" << params[i]->p1 << ",p2=" << params[i]->p2  
						<< ",p3="  << params[i]->p3 << "]" << endl;
		else if(params[i]->kernelType == Kernel::SIGMOID) 
			cout	<< "Perceptron [p1=" << params[i]->p1 << ",p2=" << params[i]->p2 
						<< ",p3="  << params[i]->p3 << "]" << endl;
		else if(params[i]->kernelType == Kernel::LINEAR)
			cout	<< "Linear [p1=" << params[i]->p1 << ",p2=" << params[i]->p2 
						<< ",p3="  << params[i]->p3 << "]" << endl;
  
	  cout << "C......................: " << params[i]->C       << endl;
	  cout << "epsilon................: " << params[i]->epsilon << endl;
	  cout << "tolerance..............: " << params[i]->tol     << endl;
	  cout << "EDR....................: " << params[i]->numEDR  << endl;
	  cout << "maxIter................: " << params[i]->maxIter << endl;
	  cout << "Limit..................: " << params[i]->limit   << endl;
	  cout << "EDR Power..............: " << params[i]->EDRPower<< endl;	
	}
	cout << endl << flush;
  //## end LineArgs::printSummary%998880765.body
}

INT LineArgs::hasTraining ()
{
  //## begin LineArgs::hasTraining%998880766.body preserve=yes

	// Non-zero (the name's length) when a training pattern file was supplied.
	INT nameLen = strlen(trainPattFile);
	return nameLen;

  //## end LineArgs::hasTraining%998880766.body
}

INT LineArgs::hasTesting ()
{
  //## begin LineArgs::hasTesting%998880767.body preserve=yes

	// Non-zero (the name's length) when a testing pattern file was supplied.
	INT nameLen = strlen(testPattFile);
	return nameLen;

  //## end LineArgs::hasTesting%998880767.body
}

INT LineArgs::readFile (CHAR *filename)
{
  //## begin LineArgs::readFile%1003594006.body preserve=yes
	// Loads all training/testing options from an INI-style configuration
	// file. Global options live in [GLOBAL]; when NUMCLASSES > 2, each class
	// may override the globals in its own [CLASS:n] section.
	// Always returns TRUE; exits the program on allocation failure.
	INT pattern,i;
	CHAR dat[40], ker[40], sol[40], classfile[_MAX_PATH];

	numClasses = _GetProfileInt (filename,(CHAR*)"GLOBAL",(CHAR*)"NUMCLASSES",2);

  try {
		// a 2-class problem trains a single SVM; multiclass trains one per class
		i = (numClasses > 2 ? numClasses : 1);
	    params = new class_arguments *[i];
		for(pattern = 0 ; pattern < i; pattern++){
	    params[pattern] = new class_arguments;
	    setDefaultValues(pattern);
		}
	}
    catch(...){
	  cerr << "Could not allocate memory (LineArgs::readFile)" << endl;
	  exit(1);
	}

	//looking for arguments that must be global	
	_GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"TRAININGPATTERNFILE",(CHAR*)"",trainPattFile );
	_GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"TESTINGPATTERNFILE", (CHAR*)"",testPattFile);
	_GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"TESTINGOUTFILE", (CHAR*)"testout.txt",testOutFile);
	
	// data representation: normal / sparse / binary (unchanged when absent)
	_GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"DATAREPRESENTATION",(CHAR*)"NOT FOUND",dat);
	_strlwr(dat);
	if(strcmp(dat,"normal")==0)
	  dataType = Data::NORMAL;
    else if(strcmp(dat,"sparse")==0)
	  dataType = Data::SPARSE;
    else if(strcmp(dat,"binary")==0)
	  dataType = Data::BINARY;
    	
	// solver: edr / smo / boosting (unchanged when absent)
	_GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"SOLUTIONMETHOD",(CHAR*)"NOT FOUND",sol);
	_strlwr(sol);
	if(strcmp(sol,"edr")==0)
	  solverMethod = Solver::EDR;
	else if(strcmp(sol,"smo")==0)
	  solverMethod = Solver::SMO;
	else if(strcmp(sol,"boosting")==0)
	  solverMethod = Solver::BOOSTING;
		
	chunkSize = _GetProfileInt (filename,(CHAR*)"GLOBAL",(CHAR*)"CHUNKSIZE", chunkSize);
	
	if (numClasses == 2)//for 2 classes svmFile is a global parameter
	  _GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"SVMFILE",(CHAR*)"svmnet.txt",params[0]->svmFile);

	//looking for arguments that may be global
	for (pattern = 0; pattern < (numClasses > 2 ? numClasses : 1); pattern++)
	{
	  _GetProfileString (filename,(CHAR*)"GLOBAL",(CHAR*)"KERNELTYPE",(CHAR*)"NOT FOUND",ker);
		_strlwr(ker);
	  if(strcmp(ker,"rbf")==0)
			params[pattern]->kernelType = Kernel::RBF;
    else if(strcmp(ker,"poly")==0)
			params[pattern]->kernelType = Kernel::POLY;		
    else if(strcmp(ker,"perceptron")==0)
			params[pattern]->kernelType = Kernel::SIGMOID;		
    else if(strcmp(ker,"linear")==0)
			params[pattern]->kernelType = Kernel::LINEAR;
	  
	  params[pattern]->limit= (SHORT) _GetProfileInt (filename,(CHAR*)"GLOBAL",(CHAR*)"LIMIT", (INT) params[pattern]->limit);
	  params[pattern]->numEDR = _GetProfileInt (filename,(CHAR*)"GLOBAL",(CHAR*)"NUMEDR",params[pattern]->numEDR);
	  params[pattern]->maxIter= _GetProfileInt (filename,(CHAR*)"GLOBAL",(CHAR*)"MAXITER",params[pattern]->maxIter);

	  params[pattern]->C= _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"C",params[pattern]->C);
	  params[pattern]->p1= _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"P1",params[pattern]->p1);
	  params[pattern]->p2= _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"P2",params[pattern]->p2);
	  params[pattern]->p3= _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"P3",params[pattern]->p3);
	  params[pattern]->epsilon= _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"EPS",params[pattern]->epsilon);
	  params[pattern]->tol= _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"TOL",params[pattern]->tol);
	  // NOTE(review): hard-coded default 2 (not the value from setDefaultValues);
	  // kept as-is to preserve existing behavior
	  params[pattern]->EDRPower = _GetProfileReal (filename,(CHAR*)"GLOBAL",(CHAR*)"EDRPOWER",2);
	}
	
	if (numClasses > 2){
	  //looking for individual arguments
	  for (pattern = 0; pattern < numClasses; pattern++){
	    CHAR str[MAX_LEN];
	    sprintf(str,"CLASS:%d",(int)pattern);

            _GetProfileString (filename,str,(CHAR*)"KERNELTYPE",(CHAR*)"NOT FOUND",ker);
            _strlwr(ker);
            if(strcmp(ker,"rbf")==0)
                params[pattern]->kernelType = Kernel::RBF;
            else if(strcmp(ker,"poly")==0)
                params[pattern]->kernelType = Kernel::POLY;		
            else if(strcmp(ker,"perceptron")==0)
                params[pattern]->kernelType = Kernel::SIGMOID;
            else if(strcmp(ker,"linear")==0)
                params[pattern]->kernelType = Kernel::LINEAR;

            // use param[0] as default value
	    params[pattern]->limit= (SHORT) _GetProfileInt (filename,str,(CHAR*)"LIMIT", (INT) params[0]->limit);
	    params[pattern]->numEDR= _GetProfileInt (filename,str,(CHAR*)"NUMEDR",params[0]->numEDR);
	    params[pattern]->maxIter= _GetProfileInt (filename,str,(CHAR*)"MAXITER",params[0]->maxIter);
	    params[pattern]->C= _GetProfileReal (filename,str,(CHAR*)"C",params[0]->C);
	    params[pattern]->p1= _GetProfileReal (filename,str,(CHAR*)"P1",params[0]->p1);
            params[pattern]->p2= _GetProfileReal (filename,str,(CHAR*)"P2",params[0]->p2);
	    params[pattern]->p3= _GetProfileReal (filename,str,(CHAR*)"P3",params[0]->p3);
	    params[pattern]->epsilon= _GetProfileReal (filename,str,(CHAR*)"EPS",params[0]->epsilon);
	    params[pattern]->tol= _GetProfileReal (filename,str,(CHAR*)"TOL",params[0]->tol);
	    // BUGFIX: read EDRPOWER from the per-class section (str), not from
	    // "GLOBAL"; otherwise per-class EDRPOWER overrides were ignored
            params[pattern]->EDRPower = _GetProfileReal (filename,str,(CHAR*)"EDRPOWER",params[0]->EDRPower);

	    sprintf(classfile,"svmnet%02d.txt",(int)pattern);
            _GetProfileString (filename,str,(CHAR*)"SVMFILE",classfile,params[pattern]->svmFile);
	  }
        }

	return TRUE;
  //## end LineArgs::readFile%1003594006.body
}

INT LineArgs::_GetProfileInt (CHAR *filename, CHAR *section, CHAR *key, INT value)
{
  //## begin LineArgs::_GetProfileInt%1003594007.body preserve=yes

	// Reads an integer from an INI-style file ("[SECTION]" then "KEY=number").
	// Returns 'value' (the caller's default) when the key is not found.
	// Exits the program if the file cannot be opened.
	//
	//file cannot have spaces before any word
	//file cannot have spaces between "="
	//file cannot have a lower case by mistake
	//file must have return in the last line


	CHAR sec[80];
	CHAR str[MAX_LEN];

	//checking the file
	ifstream file(filename);
	if(!file.is_open()) {
			cerr << "_GetProfileInt error: file not found." << endl;
			exit(1);
	}

	sprintf(sec,"[%s]",section);

	//finding the section
	file.getline(str,MAX_LEN,'\n');
	while((!(_strnicmp(str,sec,strlen(sec)) == 0))&&(!file.eof()))
	  file.getline(str,MAX_LEN,'\n');

	// get next line
	file.getline(str,MAX_LEN,'\n');

	//finding the key inside section: stop at the next "[section]" header
	INT keyLen = (INT) strlen(key);
  while((str[0] != '[')&&(!file.eof())){
		// BUGFIX: require '=' right after the key so that a short key is not
		// matched by a longer one (e.g. key "C" matching "CHUNKSIZE=...")
		if((_strnicmp(str,key,keyLen) == 0)&&(str[keyLen] == '=')){
			CHAR *token;
			token = strtok(str,"=");  
			token = strtok(NULL,"="); 
			if(token == NULL){
				// "KEY=" with no value: fall through to the default
				break;
			}
			else {
				// found: return number after '='
				return atoi(token);
			}
		}
		file.getline(str,MAX_LEN,'\n');
	}

	//not found: return default value
	return value;

  //## end LineArgs::_GetProfileInt%1003594007.body
}

INT LineArgs::_SetProfileInt (CHAR *filename, CHAR *section, CHAR *key, INT value)
{
  //## begin LineArgs::_SetProfileInt%1013885160.body preserve=yes
	// Not implemented: placeholder stub that ignores its arguments and
	// always returns 0 (nothing is written to the file).
	return 0;
  //## end LineArgs::_SetProfileInt%1013885160.body
}

REAL LineArgs::_GetProfileReal (CHAR *filename, CHAR *section, CHAR *key, REAL value)
{
  //## begin LineArgs::_GetProfileReal%1003594008.body preserve=yes

	// Reads a floating-point number from an INI-style file ("[SECTION]" then
	// "KEY=number"). Returns 'value' (the caller's default) when the key is
	// not found. Exits the program if the file cannot be opened.
	//
	//file cannot have spaces before any word
	//file cannot have spaces between "="
	//file cannot have a lower case by mistake
	//file must have return in the last line

	CHAR sec[80];
	CHAR str[MAX_LEN];

	//checking the file
	ifstream file(filename);
	if(!file.is_open()) {
			cerr << "_GetProfileReal error: file not found." << endl;
			exit(1);
	}

	sprintf(sec,"[%s]",section);

	//finding the section
	file.getline(str,MAX_LEN,'\n');
	while((!(_strnicmp(str,sec,strlen(sec)) == 0))&&(!file.eof()))
	  file.getline(str,MAX_LEN,'\n');

	// get next line
	file.getline(str,MAX_LEN,'\n');

	//finding the key inside section: stop at the next "[section]" header
	INT keyLen = (INT) strlen(key);
    while((str[0] != '[')&&(!file.eof())){
		// BUGFIX: require '=' right after the key so that a short key is not
		// matched by a longer one (e.g. key "C" matching "CHUNKSIZE=...")
		if((_strnicmp(str,key,keyLen) == 0)&&(str[keyLen] == '=')){
			CHAR *token;
			token = strtok(str,"=");  
			token = strtok(NULL,"="); 
			if(token == NULL){
				// "KEY=" with no value: fall through to the default
				break;
			}
			else {
				// found: return number after '='
				return atof(token);
			}
		}
		file.getline(str,MAX_LEN,'\n');
	}

	//not found: return default value
	return value;
  //## end LineArgs::_GetProfileReal%1003594008.body
}

INT LineArgs::_SetProfileReal (CHAR *filename, CHAR *section, CHAR *key, REAL value)
{
  //## begin LineArgs::_SetProfileReal%1013885161.body preserve=yes
	// Not implemented: placeholder stub that ignores its arguments and
	// always returns 0 (nothing is written to the file).
	return 0;
  //## end LineArgs::_SetProfileReal%1013885161.body
}

VOID LineArgs::_GetProfileString (CHAR *filename, CHAR *section, CHAR *key, CHAR *value, CHAR *strkey)
{
  //## begin LineArgs::_GetProfileString%1003594009.body preserve=yes

	// Reads a string from an INI-style file ("[SECTION]" then "KEY=text")
	// into 'strkey'. Copies 'value' (the caller's default) into 'strkey'
	// when the key is not found. Exits the program if the file cannot be
	// opened.
	//
	//file cannot have spaces before any word
	//file cannot have spaces between "="
	//file cannot have a lower case by mistake
	//file must have return in the last line

	CHAR sec[80];
	CHAR str[MAX_LEN];

	//checking the file
	ifstream file(filename);
	if(!file.is_open()) {
			cerr << "_GetProfileString error: file not found." << endl;
			exit(1);
	}

	sprintf(sec,"[%s]",section);

	//finding the section
	file.getline(str,MAX_LEN,'\n');
	while((!(_strnicmp(str,sec,strlen(sec)) == 0))&&(!file.eof()))
	  file.getline(str,MAX_LEN,'\n');

	// get next line
	file.getline(str,MAX_LEN,'\n');

	//finding the key inside section: stop at the next "[section]" header
	INT keyLen = (INT) strlen(key);
  while((str[0] != '[')&&(!file.eof())){
		// BUGFIX: require '=' right after the key so that a short key is not
		// matched by a longer one (e.g. key "C" matching "CHUNKSIZE=...")
		if((_strnicmp(str,key,keyLen) == 0)&&(str[keyLen] == '=')){
			CHAR *token;
			token = strtok(str,"=");  
			token = strtok(NULL,"="); 
			if(token == NULL){
				// "KEY=" with no value: fall through to the default
				break;
			}
			else {
				// found: return string after '='
				strcpy(strkey,token);
				return;
			}
		}
		file.getline(str,MAX_LEN,'\n');
	}

	//if did not find the key
	strcpy(strkey,value);
	return;
  //## end LineArgs::_GetProfileString%1003594009.body
}

INT LineArgs::_SetProfileString (CHAR *filename, CHAR *section, CHAR *key, CHAR *value)
{
  //## begin LineArgs::_SetProfileString%1013885162.body preserve=yes
	// Not implemented: placeholder stub that ignores its arguments and
	// always returns 0 (nothing is written to the file).
	return 0;
  //## end LineArgs::_SetProfileString%1013885162.body
}

INT LineArgs::_strnicmp (CHAR *str1, CHAR *str2, INT n)
{
  //## begin LineArgs::_strnicmp%1004816842.body preserve=yes

	// Portable case-insensitive comparison of at most the first n
	// characters (ASCII only). Returns 0 when they match, -1 otherwise.
	//
	// BUGFIX: the previous version compared only min(strlen(str1),
	// strlen(str2), n) characters and reported a match (0) when one string
	// ran out early — so a line "C" wrongly "matched" the key "CHUNKSIZE".
	// It also returned -1 for empty strings even when n == 0.
	for(INT i = 0 ; i < n ; i++){
		CHAR ch1 = str1[i];
		CHAR ch2 = str2[i];
		// fold ASCII upper case to lower case ('A'..'Z' -> 'a'..'z')
		if(ch1 >= 'A' && ch1 <= 'Z') ch1 = ch1 + 32;
		if(ch2 >= 'A' && ch2 <= 'Z') ch2 = ch2 + 32;
		if(ch1 != ch2)
			return -1;      // mismatch (includes one string ending early)
		if(ch1 == '\0')
			return 0;       // both ended together within the first n chars
	}
	return 0;               // first n characters all matched

  //## end LineArgs::_strnicmp%1004816842.body
}

CHAR * LineArgs::_strlwr (CHAR* str)
{
  //## begin LineArgs::_strlwr%1004816843.body preserve=yes

	// Lower-cases an ASCII string in place and returns it.
	// PERF: iterate to the terminator instead of calling strlen(str) in the
	// loop condition on every iteration (previously O(n^2)).
	for(CHAR *p = str ; *p != '\0' ; p++) {
		if(*p >= 'A' && *p <= 'Z')
			*p = *p + 32;   // 'A'..'Z' -> 'a'..'z'
	}

	return str;

  //## end LineArgs::_strlwr%1004816843.body
}

// Additional Declarations
  //## begin LineArgs%3B8909B000C0.declarations preserve=yes
  //## end LineArgs%3B8909B000C0.declarations

//## begin module%3B890A340188.epilog preserve=yes
//## end module%3B890A340188.epilog
