
#include "cl_boost_AdaBoost.h"

// [*] CAdaBoostTrainingEndCondition::CAdaBoostTrainingEndCondition

// Default end condition: at most 100 weak classifiers. DR target 1.0 and
// FPR target -1.0 mean the rate-based stops effectively never trigger early.
CAdaBoostTrainingEndCondition::CAdaBoostTrainingEndCondition()
{
	myInit(100, 1.0, -1.0);
}

// End condition with a custom weak-classifier cap; DR/FPR targets keep
// their non-stopping defaults (1.0 / -1.0).
CAdaBoostTrainingEndCondition::CAdaBoostTrainingEndCondition( int MaxWcAmount )
{
	myInit(MaxWcAmount, 1.0, -1.0);
}

// Fully-specified end condition: weak-classifier cap plus detection-rate
// and false-positive-rate targets used by CAdaBoostClassifier::Train.
CAdaBoostTrainingEndCondition::CAdaBoostTrainingEndCondition( int MaxWcAmount, double TrainDR, double TrainFPR )
{
	myInit(MaxWcAmount, TrainDR, TrainFPR);
}

// Resets to the default end condition (100 weak classifiers, no rate stops).
void CAdaBoostTrainingEndCondition::myInit()
{
	myInit(100, 1.0, -1.0);
}

// Resets with a custom weak-classifier cap and default DR/FPR targets.
void CAdaBoostTrainingEndCondition::myInit( int MaxWcAmount )
{
	myInit(MaxWcAmount, 1.0, -1.0);
}

// Stores the boosting stop criteria: weak-classifier cap, target detection
// rate, and target false-positive rate.
void CAdaBoostTrainingEndCondition::myInit( int MaxWcAmount, double TrainDR, double TrainFPR )
{
	this->TrainFPR = TrainFPR;
	this->TrainDR = TrainDR;
	this->MaxWcAmount = MaxWcAmount;
}

// [*] CAdaBoostParameterConfig::CAdaBoostParameterConfig

// Default training configuration for the AdaBoost trainer.
CAdaBoostParameterConfig::CAdaBoostParameterConfig()
{
	// Layer (cascade) thresholds come from the training set by default.
	fDetermineLayerThresholdUsingValSet = false;

	// Weak-learner (single-feature-classifier) search settings.
	SFC_BinAmount = 1000;
	SFC_FeatureSampleRate = 1.0;

	// Refresh the valid-feature set every 200 trained weak learners.
	DataSetUpdatingFrequency = 200;

	// Log to both the console and the log stream by default.
	fOutputTrainLogToLogStream = true;
	fOutputTrainLogToScreen = true;
}

// Deserializes the six stump fields in their serialized order (matching
// OutputToStream). Once the stream enters a fail state every later
// extraction is a no-op, so short-circuiting here is behaviorally
// equivalent to attempting all six reads.
bool CSingleFeatureClassifier::InputFromStream(istream& inStream)
{
	bool fOk =
		   !!(inStream>>FeatureIndex)
		&& !!(inStream>>Threshold)
		&& !!(inStream>>Sgn)
		&& !!(inStream>>ErrorRate)
		&& !!(inStream>>alpha)
		&& !!(inStream>>beta);
	if (!fOk)
	{
		// Corrupt stream: reset to a harmless do-nothing classifier.
		FeatureIndex = -1;
		Threshold = 0.0;
		Sgn = +1;
		ErrorRate = 1.0;
		alpha = 0.0;
		beta = 1.0;
	}
	return fOk;
}

// Serializes the stump as one tab-separated line:
//   FeatureIndex  Threshold  Sgn  ErrorRate  alpha  beta
// in the exact field order InputFromStream reads back.
void CSingleFeatureClassifier::OutputToStream(ostream& outStream)
{
	outStream.precision(12);
	outStream<<noshowpos<<FeatureIndex;
	outStream<<'\t'<<showpos<<scientific<<Threshold;
	outStream<<'\t'<<Sgn;
	outStream<<'\t'<<showpos<<scientific<<ErrorRate;
	outStream<<'\t'<<showpos<<scientific<<alpha;
	outStream<<'\t'<<showpos<<scientific<<beta;
	outStream<<endl;
}

// Decision-stump classification: compare the sign-adjusted feature value
// against the learned threshold and vote +/-alpha accordingly.
void CSingleFeatureClassifier::Classify(CDataSetForClassification* pDataSet, int ExampleIndex, double& DstConfidence, int& DstPrediction)
{
	const double v = Sgn * pDataSet->GetExampleFeature(ExampleIndex, FeatureIndex);
	DstPrediction = (v > Threshold) ? +1 : -1;
	DstConfidence = alpha * DstPrediction;
}

// Builds weighted value histograms of one feature over the positive and
// negative example lists. Both histograms span the same [fvMin, fvMax]
// range so that bin i covers identical value intervals in each.
void CSingleFeatureClassifier::BuildValueHistogram(
	CDataSetForBinaryClassification* pDataSet, CBinaryClassificationExampleList& BinFlagExampleList,
	int FeatureIndex,
	CBoundedValueHistogram& dstPosValueHist, CBoundedValueHistogram& dstNegValueHist
	)
{
	int i;
	double fvMin, fvMax;

	const int PosAmount = BinFlagExampleList.PosList.ExampleAmount;
	const int NegAmount = BinFlagExampleList.NegList.ExampleAmount;

	// Gather feature values; vector<> instead of raw new[] so the buffers
	// are released even if anything below throws.
	vector<double> PosData(PosAmount);
	for ( i = 0; i < PosAmount; ++i )
		PosData[i] = pDataSet->GetExampleFeature(BinFlagExampleList.PosList.rgExample[i].ExampleIndex, FeatureIndex);
	vector<double> NegData(NegAmount);
	for ( i = 0; i < NegAmount; ++i )
		NegData[i] = pDataSet->GetExampleFeature(BinFlagExampleList.NegList.rgExample[i].ExampleIndex, FeatureIndex);

	// Find the common value range over BOTH lists.
	// BUGFIX: the original reset fvMin/fvMax at the start of the negative
	// loop (its "!i ||" test), discarding the range already found over the
	// positive examples; a single first-value flag shared by both loops
	// keeps the positive range intact.
	bool fFirstValue = true;
	fvMin = fvMax = 0.0;
	for ( i = 0; i < PosAmount; ++i )
	{
		if (fFirstValue || PosData[i]<fvMin) fvMin = PosData[i];
		if (fFirstValue || PosData[i]>fvMax) fvMax = PosData[i];
		fFirstValue = false;
	}
	for ( i = 0; i < NegAmount; ++i )
	{
		if (fFirstValue || NegData[i]<fvMin) fvMin = NegData[i];
		if (fFirstValue || NegData[i]>fvMax) fvMax = NegData[i];
		fFirstValue = false;
	}

	// Accumulate the example weights into the bins.
	dstPosValueHist.SetParameters(CBoundedValueHistogram::INTERPOLATING_TYPE_SINGLE_BIN, fvMin, fvMax);
	for ( i = 0; i < PosAmount; ++i )
		dstPosValueHist.PushValue(PosData[i], BinFlagExampleList.PosList.rgExample[i].Weight);
	dstNegValueHist.SetParameters(CBoundedValueHistogram::INTERPOLATING_TYPE_SINGLE_BIN, fvMin, fvMax);
	for ( i = 0; i < NegAmount; ++i )
		dstNegValueHist.PushValue(NegData[i], BinFlagExampleList.NegList.rgExample[i].Weight);
}

// Trains the optimal decision stump (threshold + polarity) for ONE feature:
// sweeps every histogram bin boundary as a candidate threshold and keeps
// the configuration with minimal weighted classification error, then
// derives the AdaBoost weights alpha/beta from that error.
void CSingleFeatureClassifier::TrainByCertainFeature(
	CDataSetForBinaryClassification* pDataSet, CBinaryClassificationExampleList& TrainExampleList,
	const CAdaBoostParameterConfig& Parameters, int FeatureIndex
	)
{
	int i;

	double CurrentPosWeightSum;   // positive weight at or below the current threshold
	double CurrentNegWeightSum;   // negative weight at or below the current threshold
	double CurrentThreshold;
	double CurrentErrorLoss;
	double tw;

	CBoundedValueHistogram PosHist;
	CBoundedValueHistogram NegHist;

	// Bin buffers; vector<> instead of raw new[] so they are released even
	// if something below throws. The histograms only borrow the storage,
	// which outlives every use of them in this function.
	vector<double> tpb(Parameters.SFC_BinAmount);
	vector<double> tnb(Parameters.SFC_BinAmount);
	PosHist.myInit(Parameters.SFC_BinAmount, &tpb[0]);
	NegHist.myInit(Parameters.SFC_BinAmount, &tnb[0]);

	BuildValueHistogram(pDataSet,TrainExampleList,FeatureIndex,PosHist,NegHist);

	this->FeatureIndex = FeatureIndex;

	CurrentPosWeightSum = CurrentNegWeightSum = 0.0;
	this->Threshold = PosHist.GetThreshold_SmallerThan(0);

	// Baseline stump: classify everything as one class; its error is the
	// total weight of the minority class.
	if (TrainExampleList.PosList.TotalExampleWeight < TrainExampleList.NegList.TotalExampleWeight)
	{
		this->ErrorRate = TrainExampleList.PosList.TotalExampleWeight;
		this->Sgn = -1;
	}
	else
	{
		this->ErrorRate = TrainExampleList.NegList.TotalExampleWeight;
		this->Sgn = +1;
	}

	for ( i = 0; i < PosHist.BinAmount; ++i )
	{
		CurrentThreshold = PosHist.GetThreshold_SmallerThan(i+1);
		CurrentPosWeightSum += PosHist.pBinWeight[i];
		CurrentNegWeightSum += NegHist.pBinWeight[i];

		// Polarity +1 (predict positive above threshold): error is the
		// positive weight below plus the negative weight above.
		tw = TrainExampleList.NegList.TotalExampleWeight - CurrentNegWeightSum;
		CurrentErrorLoss = CurrentPosWeightSum + (tw<0 ? 0 : tw);
		if (CurrentErrorLoss < this->ErrorRate)
		{
			this->ErrorRate = CurrentErrorLoss;
			this->Threshold = CurrentThreshold;
			this->Sgn = +1;
		}

		// Polarity -1 (predict positive below threshold): mirror error.
		tw = TrainExampleList.PosList.TotalExampleWeight - CurrentPosWeightSum;
		CurrentErrorLoss = CurrentNegWeightSum + (tw<0 ? 0 : tw);
		if (CurrentErrorLoss < this->ErrorRate)
		{
			this->ErrorRate = CurrentErrorLoss;
			this->Threshold = CurrentThreshold;
			this->Sgn = -1;
		}
	}

	// Classify() multiplies the feature by Sgn before comparing, so a
	// negative polarity needs the stored threshold negated to match.
	if (this->Sgn == -1)
	{
		this->Threshold = 0.0 - this->Threshold;
	}

	// BUGFIX: the original guard was "ErrorRate < 0", which can never fire
	// (weighted errors are sums of non-negative weights with the negative
	// remainder clamped to 0 above). A perfect stump therefore kept
	// ErrorRate == 0 and divided by zero in beta below; clamp at EPS.
	if (this->ErrorRate<EPS) this->ErrorRate = EPS;

	if (TrainExampleList.GetTotalExampleWeight() > EPS)
		this->ErrorRate /= TrainExampleList.GetTotalExampleWeight();

	// Discrete-AdaBoost weak-learner weights.
	this->beta = (1.0-this->ErrorRate)/this->ErrorRate;
	this->alpha = +log(this->beta) * 0.5;
}

// Trains the best decision stump over a random sample of the currently
// valid features: each sampled feature is trained via TrainByCertainFeature
// and the stump with the lowest weighted error is kept in *this.
void CSingleFeatureClassifier::Train(
	CDataSetForBinaryClassification* pDataSet, CBinaryClassificationExampleList& TrainExampleList,
	const CAdaBoostParameterConfig& Parameters,
	ostream& oLog)
{
	int i,ir,fIsFirst;

	int FeatureCountRemainToTest;
	CSingleFeatureClassifier CurrentWeakClassifier;

	//srcTrainData.StartNextRound();

	// Number of features to examine this round: dimension * sample rate,
	// rounded, and at least one.
	FeatureCountRemainToTest = (int)(pDataSet->GetFeatureDimension()*Parameters.SFC_FeatureSampleRate+0.5);
	if (FeatureCountRemainToTest<1)
		FeatureCountRemainToTest = 1;

	//ir = 748;
	// Cannot sample more features than are currently valid.
	ir = pDataSet->GetValidFeatureAmount();
	if (FeatureCountRemainToTest > ir)
		FeatureCountRemainToTest = ir;

	TrainExampleList.ComputeTotalExampleWeight();

	for ( fIsFirst = i = 0; i < pDataSet->GetFeatureDimension(); ++i )
	{
		//if (i % 254 < 210) continue;
		if (!pDataSet->IsFeatureValid(i)) continue;

		// Sequential sampling without replacement: of the 'ir' valid
		// features not yet visited, take this one with probability
		// FeatureCountRemainToTest / ir.
		if (CMyRandom::NextInt(ir--) >= FeatureCountRemainToTest)
			continue;
		else
			--FeatureCountRemainToTest;

		CurrentWeakClassifier.TrainByCertainFeature(pDataSet,TrainExampleList,Parameters,i);
		// Keep the stump with the smallest weighted error seen so far
		// (the first trained stump is always kept).
		if (!(fIsFirst++) || CurrentWeakClassifier.ErrorRate<this->ErrorRate)
		{
			*this = CurrentWeakClassifier;
			//this->RealFeatureID = pDataSet.GetFeatureRealID(this->RandomFeatureID);
		}
	}
	// Features existed but none were trained: the weak learner failed.
	// NOTE(review): this writes to oLog but is gated by the SCREEN flag —
	// possibly it should test fOutputTrainLogToLogStream; confirm intent.
	if (pDataSet->GetFeatureDimension()!=0 && fIsFirst==0)
	{
		if (Parameters.fOutputTrainLogToScreen)
			oLog<<"Weak Learner DOWN!!!"<<endl;
		//for (;;);
	}
}

// Returns the K-th smallest element (0-based) of rgValue using an in-place
// iterative quickselect with Hoare partitioning; rgValue is partially
// reordered as a side effect. K is clamped into [0, size-1].
// BUGFIX: an empty vector previously fell through to rgValue[-1]
// (undefined behavior); it now returns 0.0.
double GetKthSmallestWeight(std::vector<double>& rgValue, int K)
{
	if (rgValue.empty())
		return 0.0;

	int L,R,head,tail;
	double key;
	double tn;

	L = 0; R = (int)(rgValue.size()-1);
	if (K<L) K = L;
	if (K>R) K = R;
	while (L<=R)
	{
		key = rgValue[K];

		// Hoare partition of [L,R] around 'key'.
		head = L;
		tail = R;
		for (;;)
		{
			while (head<=tail && rgValue[head]<key) ++head;
			while (head<=tail && rgValue[tail]>key) --tail;
			if (head<=tail)
			{
				tn = rgValue[head];
				rgValue[head] = rgValue[tail];
				rgValue[tail] = tn;
				++head;
				--tail;
			}
			else
				break;
		}
		// Narrow to whichever side still contains index K.
		if (tail<K)
			L = head;
		if (head>K)
			R = tail;
	}
	return rgValue[K];
}

// [*] CAdaBoostTrainingResultForPosNegExampleList

// Snapshots the example lists: records each example's index and original
// weight, zeroes the accumulated confidences, and clears all results.
void CAdaBoostTrainingResultForPosNegExampleList::myInit(CBinaryClassificationExampleList& PosNegExampleList)
{
	int i;

	PosNegExampleList.ComputeTotalExampleWeight();

	// Positive side.
	_PosExampleAmount = PosNegExampleList.PosList.ExampleAmount;
	_PosExampleSumWeight = PosNegExampleList.PosList.TotalExampleWeight;
	rgPosExampleIndex.resize(_PosExampleAmount);
	rgPosExampleConfidence.resize(_PosExampleAmount);
	rgPosExampleOriginalWeight.resize(_PosExampleAmount);
	for ( i = 0; i < _PosExampleAmount; ++i )
	{
		const CWeightedClassificationExample& ex = PosNegExampleList.PosList.rgExample[i];
		rgPosExampleIndex[i] = ex.ExampleIndex;
		rgPosExampleOriginalWeight[i] = ex.Weight;
		rgPosExampleConfidence[i] = 0.0;
	}

	// Negative side.
	_NegExampleAmount = PosNegExampleList.NegList.ExampleAmount;
	_NegExampleSumWeight = PosNegExampleList.NegList.TotalExampleWeight;
	rgNegExampleIndex.resize(_NegExampleAmount);
	rgNegExampleConfidence.resize(_NegExampleAmount);
	rgNegExampleOriginalWeight.resize(_NegExampleAmount);
	for ( i = 0; i < _NegExampleAmount; ++i )
	{
		const CWeightedClassificationExample& ex = PosNegExampleList.NegList.rgExample[i];
		rgNegExampleIndex[i] = ex.ExampleIndex;
		rgNegExampleOriginalWeight[i] = ex.Weight;
		rgNegExampleConfidence[i] = 0.0;
	}

	ClassificationResult_ByPrediction.Clear();
	ClassificationResult_ByConfidence.Clear();

	CascadeDR = CascadeFPR = 0.0;
}

// Re-evaluates the stored confidences: fills the by-prediction and
// by-confidence result accumulators and computes the weighted cascade
// detection rate (DR) and false-positive rate (FPR) at the given threshold.
void CAdaBoostTrainingResultForPosNegExampleList::RunTest(double CascadeThreshold)
{
	int i;

	ClassificationResult_ByPrediction.Clear();
	ClassificationResult_ByConfidence.Clear();
	CascadeDR = CascadeFPR = 0.0;

	// Positive examples contribute to DR when they pass the threshold.
	for ( i = 0; i < _PosExampleAmount; ++i )
	{
		const double c = rgPosExampleConfidence[i];
		const double w = rgPosExampleOriginalWeight[i];
		ClassificationResult_ByPrediction.NewTest(+1, (c>0 ? +1 : -1), w);
		ClassificationResult_ByConfidence.Pos.PushBack(c);
		if (c>CascadeThreshold) CascadeDR += w;
	}
	// Normalize by the total positive weight (weighted DR).
	if (_PosExampleSumWeight>EPS) CascadeDR /= _PosExampleSumWeight;

	// Negative examples contribute to FPR when they pass the threshold.
	for ( i = 0; i < _NegExampleAmount; ++i )
	{
		const double c = rgNegExampleConfidence[i];
		const double w = rgNegExampleOriginalWeight[i];
		ClassificationResult_ByPrediction.NewTest(-1, (c>0 ? +1 : -1), w);
		ClassificationResult_ByConfidence.Neg.PushBack(c);
		if (c>CascadeThreshold) CascadeFPR += w;
	}
	if (_NegExampleSumWeight>EPS) CascadeFPR /= _NegExampleSumWeight;

	ClassificationResult_ByPrediction.Analyse();
	ClassificationResult_ByConfidence.Analyse();
}

//void CAdaBoostTrainingResultForPosNegExampleList::OutputToStream(ostream& outStream)
//{
//	outStream.precision(3);
//	outStream<<" Cascade.DR/FPR = "<<setw(7)<<fixed<<100.0*CascadeDR;
//	outStream.precision(6);
//	outStream<<"/"<<setw(10)<<fixed<<100.0*CascadeFPR;
//	outStream<<endl;
//	outStream.precision(3);
//	outStream<<" TestResult: Acc = "<<setw(7)<<fixed<<100.0*(1.0-ClassificationResult_ByPrediction.ErrorRate);
//	outStream<<", DR = "<<setw(7)<<fixed<<100.0*ClassificationResult_ByPrediction.DetectionRate;
//	outStream.precision(6);
//	outStream<<", FPR = "<<setw(10)<<fixed<<100.0*ClassificationResult_ByPrediction.FalsePositiveRate;
//	outStream.precision(3);
//	outStream<<", Prec = "<<setw(7)<<fixed<<100.0*ClassificationResult_ByPrediction.Precision;
//	outStream<<endl;
//}

// Prints the weighted test summary as percentages: cascade DR/FPR plus the
// weighted accuracy / DR / FPR / precision from the by-prediction result.
// FPR columns use extra precision since the values can be very small.
void CAdaBoostTrainingResultForPosNegExampleList::OutputToStream_w(ostream& outStream)
{
	outStream.precision(3);
	outStream<<" Cascade.DR/FPR = "<<setw(7)<<fixed<<100.0*CascadeDR;
	outStream.precision(6);
	outStream<<"/"<<setw(10)<<fixed<<100.0*CascadeFPR;
	outStream<<endl;
	outStream.precision(3);
	outStream<<" TestResult: Acc = "<<setw(7)<<fixed<<100.0*(1.0-ClassificationResult_ByPrediction.wErrorRate);
	outStream<<", DR = "<<setw(7)<<fixed<<100.0*ClassificationResult_ByPrediction.wDetectionRate;
	outStream.precision(6);
	outStream<<", FPR = "<<setw(10)<<fixed<<100.0*ClassificationResult_ByPrediction.wFalsePositiveRate;
	outStream.precision(3);
	outStream<<", Prec = "<<setw(7)<<fixed<<100.0*ClassificationResult_ByPrediction.wPrecision;
	outStream<<endl;
}

// Exports the stored examples into DstBinaryExampleList, using each
// example's accumulated confidence as its weight in the new list.
void CAdaBoostTrainingResultForPosNegExampleList::MakeList( CBinaryClassificationExampleList& DstBinaryExampleList )
{
	int j;

	DstBinaryExampleList.PosList.Clear();
	DstBinaryExampleList.NegList.Clear();
	for ( j = 0; j < _PosExampleAmount; ++j )
		DstBinaryExampleList.PosList.PushBack(rgPosExampleIndex[j], rgPosExampleConfidence[j]);
	for ( j = 0; j < _NegExampleAmount; ++j )
		DstBinaryExampleList.NegList.PushBack(rgNegExampleIndex[j], rgNegExampleConfidence[j]);
	DstBinaryExampleList.ComputeTotalExampleWeight();
}

// [*] CAdaBoostClassifier

// Deserializes the strong classifier: a header (weak-classifier count,
// cascade threshold, alpha sum) followed by WcAmount weak classifiers.
// AlphaSum is recomputed from the loaded weak classifiers rather than
// trusted from the stream. On any failure the classifier is reset to an
// empty state and false is returned.
bool CAdaBoostClassifier::InputFromStream(istream& inStream)
{
	int i;
	bool ret = true;

	this->TotalTrainedWeakLearnerAmount = 0;

	if (!(inStream>>WcAmount)) ret = false;
	if (!(inStream>>CascadeThreshold)) ret = false;
	if (!(inStream>>AlphaSum)) ret = false;
	// ROBUSTNESS FIX: reject a negative count from a corrupted stream —
	// the original passed it straight to rgWc.resize().
	if (ret && WcAmount < 0) ret = false;
	if (ret)
	{
		rgWc.resize(WcAmount);
		AlphaSum = 0.0;
		for ( i = 0; i < WcAmount; ++i )
		{
			if (!(rgWc[i].InputFromStream(inStream)))
			{
				ret = false;
				break;
			}
			AlphaSum += rgWc[i].alpha;
		}
	}
	if (!ret)
	{
		WcAmount = 0;
		CascadeThreshold = 0.0;
		AlphaSum = 0.0;
		rgWc.clear();
	}
	return ret;
}

// Serializes the strong classifier: one header line (weak-classifier
// count, cascade threshold, alpha sum), then one line per weak classifier.
// Format matches what InputFromStream reads back.
void CAdaBoostClassifier::OutputToStream(ostream& outStream)
{
	int i;

	outStream.precision(12);
	outStream<<setw(4)<<noshowpos<<WcAmount;
	outStream<<'\t'<<showpos<<scientific<<CascadeThreshold;
	outStream<<'\t'<<showpos<<scientific<<AlphaSum;
	outStream<<endl;
	for ( i = 0; i < WcAmount; ++i )
		rgWc[i].OutputToStream(outStream);
}

// Runs one boosting round: trains a new weak classifier on the current
// example weights, accumulates its confidences into TrainDetails, and
// up-weights the examples it misclassified by beta before renormalizing
// (discrete AdaBoost reweighting).
void CAdaBoostClassifier::TrainNextLevel(
	CDataSetForBinaryClassification* pTrainDataSet, CBinaryClassificationExampleList& TrainExampleList,
	const CAdaBoostParameterConfig& Parameters,
	CAdaBoostTrainingResultForPosNegExampleList& TrainDetails,
	ostream& oLog
	)
{
	int i;
	vector<CWeightedClassificationExample>::iterator itr;
	int tPrediction;
	double tConfidence;

	// Append a slot for the new weak classifier and train it.
	rgWc.resize(++WcAmount);
	CSingleFeatureClassifier& CurrentWeakClassifier = rgWc[WcAmount-1];

	CurrentWeakClassifier.Train(pTrainDataSet, TrainExampleList, Parameters, oLog);

	// ee_1: error reported by training; ee_2: error re-measured below on
	// the pre-update weights. NOTE(review): both are computed but never
	// read afterwards — apparently leftover diagnostics (see the TODO).
	double ee_1 = CurrentWeakClassifier.ErrorRate;
	double ee_2 = 0.0;
	double ee_s = TrainExampleList.GetTotalExampleWeight();

	AlphaSum += CurrentWeakClassifier.alpha;

	// Positives: add the stump's confidence to the running sums and
	// reweight the examples it predicted as negative (misclassified).
	for ( itr = TrainExampleList.PosList.rgExample.begin(), i = 0; i < TrainExampleList.PosList.ExampleAmount; ++i, ++itr )
	{
		CurrentWeakClassifier.Classify(pTrainDataSet,itr->ExampleIndex,tConfidence,tPrediction);
		TrainDetails.rgPosExampleConfidence[i] += tConfidence;
		if (tPrediction == -1)
		{
			ee_2 += itr->Weight;
			itr->Weight *= CurrentWeakClassifier.beta;
		}
	}
	// Negatives: symmetric update for examples predicted as positive.
	for ( itr = TrainExampleList.NegList.rgExample.begin(), i = 0; i < TrainExampleList.NegList.ExampleAmount; ++i, ++itr )
	{
		CurrentWeakClassifier.Classify(pTrainDataSet,itr->ExampleIndex,tConfidence,tPrediction);
		TrainDetails.rgNegExampleConfidence[i] += tConfidence;
		if (tPrediction == +1)
		{
			ee_2 += itr->Weight;
			itr->Weight *= CurrentWeakClassifier.beta;
		}
	}
	ee_2 /= ee_s;
	TrainExampleList.NormalizeExampleWeight();

	// Periodically refresh the data set's valid-feature bookkeeping.
	++TotalTrainedWeakLearnerAmount;
	if (Parameters.DataSetUpdatingFrequency>0 && TotalTrainedWeakLearnerAmount%Parameters.DataSetUpdatingFrequency==0)
		pTrainDataSet->UpdateValidFeatures();
	//TODO
}

// Bisection search for the highest cascade threshold whose weighted
// detection rate (computed by TrainDetails.RunTest) still reaches paraDR.
// A paraDR above 1.0 switches to an interpolated mode at the end.
double CAdaBoostClassifier::GetThreshold_w(
	CAdaBoostTrainingResultForPosNegExampleList& TrainDetails,
	double paraDR
	)
{
	double ret = 0.0;
	int i;
	int PosExampleAmount, NegExampleAmount;
	double t,tmin,tmax,t_eps;

	PosExampleAmount = (int)TrainDetails.rgPosExampleConfidence.size();
	NegExampleAmount = (int)TrainDetails.rgNegExampleConfidence.size();

	// Confidence range over both classes (seeded at 0.0, so the range
	// always contains zero).
	tmin = tmax = 0.0;
	for ( i = 0; i < PosExampleAmount; ++i )
	{
		t = TrainDetails.rgPosExampleConfidence[i];
		if (t < tmin) tmin = t;
		if (t > tmax) tmax = t;
	}
	for ( i = 0; i < NegExampleAmount; ++i )
	{
		t = TrainDetails.rgNegExampleConfidence[i];
		if (t < tmin) tmin = t;
		if (t > tmax) tmax = t;
	}

	// Pad the search interval by half its width (at least EPS) so every
	// observed confidence lies strictly inside it.
	t_eps = (tmax - tmin);
	t_eps = (t_eps>EPS ? t_eps : EPS);
	tmin -= t_eps * 0.5;
	tmax += t_eps * 0.5;
	double tbase = tmax;
	double trange = tmax - tmin;
	t_eps /= 1e8;	// convergence tolerance for the bisection
	for ( i = 0; i < 100; ++i )
	{
		t = (tmin + tmax) * 0.5;
		TrainDetails.RunTest(t);
		if (TrainDetails.CascadeDR<paraDR && TrainDetails.CascadeDR<1.0-EPS)
		{
			// DR too low at this threshold: search lower.
			tmax = t;
			continue;
		}
		else
		{
			// DR target met: try a higher threshold.
			tmin = t;
			if (tmax - tmin < t_eps) break;
		}
	}

	if (paraDR > 1.0)
	{
		// paraDR > 1.0: scale the found threshold's relative distance from
		// the top of the range by paraDR (clamped to the full range).
		ret = (tbase - tmin) / trange;
		ret *= paraDR;
		if (ret > 1.0) ret = 1.0;
		ret = tbase - ret * trange;
	}
	else
		ret = tmin;
	return ret;
}

// Order-statistic variant of the threshold search: picks a threshold that
// lets (roughly) a fraction paraDR of the positive examples pass, snapped
// just above the largest negative confidence that is still below the
// chosen positive quantile.
double CAdaBoostClassifier::GetThreshold_i(
	const vector<double>& rgPosExampleConfidence, const vector<double>& rgNegExampleConfidence,
	double paraDR
	)
{
	double ret = 0.0;
	int i;
	int PosExampleAmount, NegExampleAmount;
	double t,tmin,tmax;

	PosExampleAmount = (int)rgPosExampleConfidence.size();
	NegExampleAmount = (int)rgNegExampleConfidence.size();

	// Note: tmin deliberately tracks only the positive confidences (the
	// negative-side min check is commented out); tmax covers both classes.
	tmin = tmax = 0.0;
	for ( i = 0; i < PosExampleAmount; ++i )
	{
		t = rgPosExampleConfidence[i];
		if (t < tmin) tmin = t;
		if (t > tmax) tmax = t;
	}
	for ( i = 0; i < NegExampleAmount; ++i )
	{
		t = rgNegExampleConfidence[i];
		//if (t < tmin) tmin = t;
		if (t > tmax) tmax = t;
	}

	// K = number of positives that should pass the threshold.
	int K = (int)(PosExampleAmount * paraDR + 1.0 - EPS);
	if (K<=0)
		ret = tmax + EPS;	// keep nothing: threshold above all confidences
	else if (K>=PosExampleAmount)
		ret = tmin - EPS;	// keep every positive: threshold below them all
	else
	{
		ret = tmin - EPS - EPS;
		// tTh = just below the K-th highest positive confidence (the cut).
		vector<double> temp_vector = rgPosExampleConfidence;
		double tTh = GetKthSmallestWeight(temp_vector,PosExampleAmount-K) - EPS;
		//for ( i = 0; i < PosExampleAmount; ++i )
		//{
		//	t = rgPosExampleConfidence[i];
		//	if (t<tTh && t>ret)
		//		ret = t;
		//}
		// Snap to the largest negative confidence still below the cut.
		for ( i = 0; i < NegExampleAmount; ++i )
		{
			t = rgNegExampleConfidence[i];
			if (t<tTh && t>ret)
				ret = t;
		}
		ret += EPS;
	}
	return ret;
}

// Full AdaBoost training loop: adds weak classifiers one at a time until
// either MaxWcAmount is reached or the cascade FPR target is met at a
// threshold preserving the required detection rate. The example weights
// mutated during boosting are restored before returning.
void CAdaBoostClassifier::Train(
	CDataSetForBinaryClassification* pTrainDataSet, CBinaryClassificationExampleList& TrainExampleList,
	CDataSetForBinaryClassification* pValDataSet, CBinaryClassificationExampleList& ValExampleList,
	const CAdaBoostParameterConfig& Parameters, const CAdaBoostTrainingEndCondition& EndCondition,
	CAdaBoostTrainingResultForPosNegExampleList& TrainDetails, CAdaBoostTrainingResultForPosNegExampleList& ValDetails,
	ostream& oLog
	)
{
	vector<CWeightedClassificationExample>::iterator itr;
	int i,j;
	double tConfidence;
	int tPrediction;

	bool fProcessValSet = (pValDataSet != NULL);

	// Boosting reweights the examples in place; back up both lists so the
	// caller's weights can be restored afterwards.
	CBinaryClassificationExampleList EL_Backup_Train;
	EL_Backup_Train = TrainExampleList;
	CBinaryClassificationExampleList EL_Backup_Val;
	EL_Backup_Val = ValExampleList;

	TrainDetails.myInit(TrainExampleList);
	if (fProcessValSet)
		ValDetails.myInit(ValExampleList);

	// Start from an empty strong classifier.
	WcAmount = 0;
	rgWc.clear();
	AlphaSum = 0.0;

	char strTemp[10001];
	sprintf_s(strTemp, "Start Training AdaBoost Classifier...");
	if (Parameters.fOutputTrainLogToScreen)
		cout<<strTemp<<endl;
	if (Parameters.fOutputTrainLogToLogStream)
		oLog<<strTemp<<endl;

	// Remember the console cursor so per-round progress overwrites itself.
	CONSOLE_SCREEN_BUFFER_INFO CursorInfo;
	int fC = ::GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE),&CursorInfo);

	for ( i = 0; i < EndCondition.MaxWcAmount; ++i )
	{
		TrainNextLevel(pTrainDataSet, TrainExampleList, Parameters, TrainDetails, oLog);

		// Propagate the new weak classifier's votes to the validation-set
		// confidences (train-set ones were updated inside TrainNextLevel).
		if (fProcessValSet)
		{
			CSingleFeatureClassifier& CurrentWeakClassifier = rgWc[WcAmount-1];
			for ( itr = ValExampleList.PosList.rgExample.begin(), j = 0; j < ValExampleList.PosList.ExampleAmount; ++j, ++itr )
			{
				CurrentWeakClassifier.Classify(pValDataSet,itr->ExampleIndex,tConfidence,tPrediction);
				ValDetails.rgPosExampleConfidence[j] += tConfidence;
			}
			for ( itr = ValExampleList.NegList.rgExample.begin(), j = 0; j < ValExampleList.NegList.ExampleAmount; ++j, ++itr )
			{
				CurrentWeakClassifier.Classify(pValDataSet,itr->ExampleIndex,tConfidence,tPrediction);
				ValDetails.rgNegExampleConfidence[j] += tConfidence;
			}
		}

		// BE CAREFUL: the threshold's source set depends on configuration.
		if (fProcessValSet && Parameters.fDetermineLayerThresholdUsingValSet)
			CascadeThreshold = GetThreshold_w(ValDetails, EndCondition.TrainDR);
		else
			CascadeThreshold = GetThreshold_w(TrainDetails, EndCondition.TrainDR);

		TrainDetails.RunTest(CascadeThreshold);
		if (fProcessValSet)
			ValDetails.RunTest(CascadeThreshold);

		if (Parameters.fOutputTrainLogToScreen)
		{
			// Rewind the cursor so this round's stats overwrite the last.
			fC = ::SetConsoleCursorPosition(GetStdHandle(STD_OUTPUT_HANDLE),CursorInfo.dwCursorPosition);
			cout<<" #.WC = "<<setw(4)<<WcAmount<<endl;
			cout<<" Train Set:"<<endl;
			TrainDetails.OutputToStream_w(cout);
			if (fProcessValSet)
			{
				cout<<" Val Set:"<<endl;
				ValDetails.OutputToStream_w(cout);
			}
		}

		// Early stop once the FPR target is reached on the governing set.
		if (fProcessValSet && Parameters.fDetermineLayerThresholdUsingValSet)
		{
			if (ValDetails.CascadeFPR <= EndCondition.TrainFPR) break;
		}
		else
		{
			if (TrainDetails.CascadeFPR <= EndCondition.TrainFPR) break;
		}
	}

	// Restore the callers' example weights.
	TrainExampleList = EL_Backup_Train;
	ValExampleList = EL_Backup_Val;

	// BUGFIX: this final summary is written to the log stream, so it must
	// be gated by fOutputTrainLogToLogStream. The original tested the
	// screen flag (inconsistent with the start-up banner above), which
	// silenced the log summary when only log output was enabled and wrote
	// it when only screen output was enabled.
	if (Parameters.fOutputTrainLogToLogStream)
	{
		oLog<<" #.WC = "<<setw(4)<<WcAmount<<endl;
		oLog<<" Train Set:"<<endl;
		TrainDetails.OutputToStream_w(oLog);
		if (fProcessValSet)
		{
			oLog<<" Val Set:"<<endl;
			ValDetails.OutputToStream_w(oLog);
		}
	}
}

// Convenience overload: train a fixed number of weak classifiers with no
// validation set (the training list doubles as the val list, and layer
// thresholds are forced to come from the training set).
void CAdaBoostClassifier::Train(
	CDataSetForBinaryClassification* pTrainDataSet, CBinaryClassificationExampleList& TrainExampleList,
	const CAdaBoostParameterConfig& Parameters, int WeakClassiferAmount, ostream& oLog )
{
	CAdaBoostParameterConfig tParameters = Parameters;
	tParameters.fDetermineLayerThresholdUsingValSet = false;

	// DR=1.0 / FPR=-1.0 disable the rate-based early stops, so exactly
	// WeakClassiferAmount rounds are run.
	CAdaBoostTrainingEndCondition tEndCondition(WeakClassiferAmount, 1.0, -1.0);

	CAdaBoostTrainingResultForPosNegExampleList TR_Train;
	CAdaBoostTrainingResultForPosNegExampleList TR_Val;

	this->Train(
		pTrainDataSet, TrainExampleList, NULL, TrainExampleList,
		tParameters, tEndCondition, TR_Train, TR_Val, oLog);
}

// Convenience overload: train a fixed number of weak classifiers while
// monitoring a separate validation set; callers that don't need the
// per-example training details use this form.
void CAdaBoostClassifier::Train(
	CDataSetForBinaryClassification* pTrainDataSet, CBinaryClassificationExampleList& TrainExampleList,
	CDataSetForBinaryClassification* pValDataSet, CBinaryClassificationExampleList& ValExampleList,
	const CAdaBoostParameterConfig& Parameters, int WeakClassiferAmount, ostream& oLog )
{
	// DR=1.0 / FPR=-1.0 disable the rate-based early stops.
	CAdaBoostTrainingEndCondition tEndCondition(WeakClassiferAmount, 1.0, -1.0);

	CAdaBoostTrainingResultForPosNegExampleList TR_Train;
	CAdaBoostTrainingResultForPosNegExampleList TR_Val;

	this->Train(
		pTrainDataSet, TrainExampleList, pValDataSet, ValExampleList,
		Parameters, tEndCondition, TR_Train, TR_Val, oLog);
}

// Convenience overload: train with an explicit end condition but no
// validation set (training list doubles as val list; layer thresholds are
// forced to come from the training set).
void CAdaBoostClassifier::Train(
	CDataSetForBinaryClassification* pTrainDataSet, CBinaryClassificationExampleList& TrainExampleList,
	const CAdaBoostParameterConfig& Parameters, const CAdaBoostTrainingEndCondition& EndCondition, ostream& oLog )
{
	CAdaBoostParameterConfig tParameters = Parameters;
	tParameters.fDetermineLayerThresholdUsingValSet = false;

	CAdaBoostTrainingResultForPosNegExampleList TR_Train;
	CAdaBoostTrainingResultForPosNegExampleList TR_Val;

	this->Train(
		pTrainDataSet, TrainExampleList, NULL, TrainExampleList,
		tParameters, EndCondition, TR_Train, TR_Val, oLog);
}

// Convenience overload: explicit end condition and validation set, for
// callers that don't need the per-example training/validation details.
void CAdaBoostClassifier::Train(
	CDataSetForBinaryClassification* pTrainDataSet, CBinaryClassificationExampleList& TrainExampleList,
	CDataSetForBinaryClassification* pValDataSet, CBinaryClassificationExampleList& ValExampleList,
	const CAdaBoostParameterConfig& Parameters, const CAdaBoostTrainingEndCondition& EndCondition, ostream& oLog )
{
	CAdaBoostTrainingResultForPosNegExampleList TR_Train;
	CAdaBoostTrainingResultForPosNegExampleList TR_Val;

	this->Train(
		pTrainDataSet, TrainExampleList, pValDataSet, ValExampleList,
		Parameters, EndCondition, TR_Train, TR_Val, oLog);
}

// Strong classification: sum the signed confidences of all weak
// classifiers; the sign of the sum gives the final prediction (a zero sum
// counts as positive, as in the original).
void CAdaBoostClassifier::_Classify(CDataSetForClassification* pDataSet, int ExampleIndex, double& DstConfidence, int& DstPrediction)
{
	double SumConfidence = 0.0;
	for ( int k = 0; k < WcAmount; ++k )
	{
		double tConfidence;
		int tPrediction;
		rgWc[k].Classify(pDataSet, ExampleIndex, tConfidence, tPrediction);
		SumConfidence += tConfidence;
	}
	DstConfidence = SumConfidence;
	DstPrediction = (SumConfidence>=0 ? +1 : -1);
}

// Public classification entry point; forwards to the internal _Classify.
void CAdaBoostClassifier::Classify( CDataSetForClassification* pDataSet, int ExampleIndex, double& DstConfidence, int& DstPrediction )
{
	this->_Classify(pDataSet, ExampleIndex, DstConfidence, DstPrediction);
}

