
#include "cl_boost_Cascade.h"

#define USE_EQUAL_POS_NEG_DATA_WEIGHT 0

#define USE_FULL_POS_DATA_EACH_LAYER 1

#define SAVE_POS_DATA 1

// Default parameter set for cascaded AdaBoost training.
CCascadedAdaBoostParametersConfig::CCascadedAdaBoostParametersConfig()
{
	// Per-layer AdaBoost settings, and the fallback end condition used for
	// layers without an explicit entry in PerLayerTrainingEndCondition.
	AdaBoostConfig = CAdaBoostParameterConfig();
	DefaultLayerTrainingCondition = CAdaBoostTrainingEndCondition();

	MaxLayerAmount = 1;
	PerLayerTrainingEndCondition.Clear();

	// Group count used when building the grouped train feature file.
	TrainFeatureGroupAmount = 10;

	// Per-layer sampling bounds on the training set; a negative minimum means
	// "no lower bound" (see the >0 guards at the top of the layer loop in Train).
	MaxTrainPosExampleAmount = 65536;
	MinTrainPosExampleAmount = -1;
	MaxTrainNegExampleAmount = 65536;
	MinTrainNegExampleAmount = 4096;

	// Working directory for temporary feature files and per-layer ROC dumps.
	strPN_ForTrain = ".\\";

	// Cached positive-example feature files and their memory budgets
	// (presumably megabytes -- confirm against the data-set loaders).
	strFN_PosFeatureData_Train = "pos_feature.train.dat";
	strFN_PosFeatureData_Val = "pos_feature.val.dat";
	FeatureMemoryUsage_Pos_Train = 200;
	FeatureMemoryUsage_Pos_Val = 100;

	// Per-layer temporary feature files (regenerated and deleted each layer).
	strFN_TempFeatureData_Train = "feature.train.dat";
	strFN_TempFeatureData_Val = "feature.val.dat";
	FeatureMemoryUsage_Train = 768;
	FeatureMemoryUsage_Val = 384;

	// ROC dump settings: sample count and FPR range, optionally log-scaled.
	OutputROC_SamplingCount = 120;
	OutputROC_MinFPR = 1e-6;
	OutputROC_MaxFPR = 1e-0;
	OutputROC_fUseLogScale = true;
}

// Builds the per-layer training end-condition list.
//
// With an empty file name, every layer gets DefaultLayerTrainingCondition.
// Otherwise the file is a sequence of records "LayerIndex MaxWcAmount DR FPR"
// with non-decreasing LayerIndex; layers between records inherit the most
// recent condition, and any layers beyond the last record are padded with it
// up to MaxLayerAmount.
//
// Fixes vs. the previous version:
//  - LayerIndex is now initialized; it was read uninitialized when the very
//    first extraction failed (missing/empty file), which is undefined behavior.
//  - Parsing now breaks immediately after padding to MaxLayerAmount; before,
//    the loop fell through and could PushBack past MaxLayerAmount using a
//    stale LayerIndex value.
void CCascadedAdaBoostParametersConfig::InitLayerTrainingEndConditionList(string strFN_Config)
{
	if (strFN_Config == "")
	{
		PerLayerTrainingEndCondition.Resize(MaxLayerAmount);
		PerLayerTrainingEndCondition.Fill(DefaultLayerTrainingCondition);
		return;
	}

	ifstream inFile(strFN_Config.c_str());

	PerLayerTrainingEndCondition.Clear();
	int LastLayerIndex, LayerIndex;
	int a;
	double b,c;
	CAdaBoostTrainingEndCondition LastCondition = DefaultLayerTrainingCondition;
	for ( LastLayerIndex = 0, LayerIndex = 0; ; )
	{
		// Stop on read failure (including an unopenable file) or on a
		// decreasing layer index; pad the remaining layers with the last
		// condition seen, then leave the loop.
		if (!(inFile>>LayerIndex) || LayerIndex<LastLayerIndex)
		{
			if ((int)PerLayerTrainingEndCondition.Size < MaxLayerAmount)
				PerLayerTrainingEndCondition.Resize(MaxLayerAmount, LastCondition);
			break;
		}
		// Layers up to (and including) LayerIndex inherit the previous condition.
		while (LastLayerIndex < LayerIndex)
		{
			++LastLayerIndex;
			PerLayerTrainingEndCondition.PushBack(LastCondition);
		}
		// Update the running condition from this record (skipped on malformed input).
		if (inFile>>a>>b>>c)
		{
			LastCondition.MaxWcAmount = a;
			LastCondition.TrainDR = b;
			LastCondition.TrainFPR = c;
		}
	}

	inFile.clear();
	inFile.close();
}

// Frees the per-case detail arrays and zeroes the size/count fields.
// Called from the destructor with possibly-NULL members, so MyRelease_List
// presumably tolerates NULL pointers -- confirm in CDataStructureTools.
void CDetailedBinaryCascadeClassifyResult::myRelease()
{
	_size = 0;
	TestCaseAmount = 0;
	CDataStructureTools::MyRelease_List(resPerCaseRealFlag);
	CDataStructureTools::MyRelease_List(resPerCaseResult);
	CDataStructureTools::MyRelease_List(resPerCasePrediction);
	CDataStructureTools::MyRelease_List(resPerCaseExitLayer);
}

// Constructs an empty result object; myInit() must be called to allocate
// the per-case arrays before NewTest() may be used.
CDetailedBinaryCascadeClassifyResult::CDetailedBinaryCascadeClassifyResult()
{
	_size = 0;
	// Fix: TestCaseAmount was left uninitialized here (every other reset path
	// zeroes it, and NewTest() uses it as an array index).
	TestCaseAmount = 0;
	resPerCaseRealFlag = NULL;
	resPerCaseResult = NULL;
	resPerCasePrediction = NULL;
	resPerCaseExitLayer = NULL;
}

// Releases the per-case arrays.
CDetailedBinaryCascadeClassifyResult::~CDetailedBinaryCascadeClassifyResult()
{
	myRelease();
}

// Ensures capacity for MaxTestCaseAmount (at least 1) recorded test cases,
// then resets the counters.  Allocation is grow-only: buffers that are
// already large enough are kept and reused.
void CDetailedBinaryCascadeClassifyResult::myInit(int MaxTestCaseAmount)
{
	const int Capacity = (MaxTestCaseAmount < 1) ? 1 : MaxTestCaseAmount;

	if (_size < Capacity)
	{
		// Too small: drop the old arrays and allocate fresh ones.
		myRelease();
		_size = Capacity;
		resPerCaseRealFlag = new int [Capacity];
		resPerCaseResult = new double [Capacity];
		resPerCasePrediction = new int [Capacity];
		resPerCaseExitLayer = new int [Capacity];
	}

	Clear();
}

// Records one classified example: its true label, raw confidence, predicted
// label, the cascade layer at which it exited, and its weight.
// NOTE(review): no bounds check -- the caller must have sized the arrays via
// myInit() with enough capacity for all NewTest() calls since the last Clear().
void CDetailedBinaryCascadeClassifyResult::NewTest(int RealFlag, double Result, int Prediction, int ExitLayer, double Weight)
{
	// Update aggregate counters first, then append the per-case detail.
	Performance.NewTest(RealFlag, Prediction, Weight);
	resPerCaseRealFlag[TestCaseAmount] = RealFlag;
	resPerCaseResult[TestCaseAmount] = Result;
	resPerCasePrediction[TestCaseAmount] = Prediction;
	resPerCaseExitLayer[TestCaseAmount] = ExitLayer;
	++TestCaseAmount;
}

// Resets the aggregate counters and the recorded-case count; the allocated
// per-case arrays are kept so their capacity can be reused.
void CDetailedBinaryCascadeClassifyResult::Clear()
{
	Performance.Clear();
	TestCaseAmount = 0;
}

// Finalizes the aggregate statistics accumulated via NewTest()
// (delegates entirely to the Performance member).
void CDetailedBinaryCascadeClassifyResult::Analyse()
{
	Performance.Analyse();
}

// [*] CCascadedAdaBoostClassifier

// Constructs an empty cascade (no layers, no trained weak learners) and
// resets the testing configuration to its defaults.
CCascadedAdaBoostClassifier::CCascadedAdaBoostClassifier()
{
	LayerAmount = 0;
	rgLayer.clear();
	total_trained_weak_learner_amount = 0;
	// -1 / false: default test-layer count and no classify-history saving
	// (see SetAndClearTestingConfig).
	SetAndClearTestingConfig(-1, false);
}

// Deserializes the cascade: the layer count followed by each layer's own
// serialized form.  The running weak-learner total is rebuilt and stamped
// onto every successfully loaded layer.  On any failure the classifier is
// left in a valid empty state and false is returned.
bool CCascadedAdaBoostClassifier::InputFromStream(istream& inStream)
{
	bool ok = !(inStream>>LayerAmount).fail();

	if (ok)
	{
		total_trained_weak_learner_amount = 0;
		rgLayer.resize(LayerAmount);
		for ( int layer = 0; ok && layer < LayerAmount; ++layer )
		{
			ok = rgLayer[layer].InputFromStream(inStream);
			if (ok)
			{
				total_trained_weak_learner_amount += rgLayer[layer].WcAmount;
				rgLayer[layer].TotalTrainedWeakLearnerAmount = total_trained_weak_learner_amount;
			}
		}
	}

	if (!ok)
	{
		// Roll back to an empty cascade rather than a half-loaded one.
		LayerAmount = 0;
		total_trained_weak_learner_amount = 0;
		rgLayer.clear();
	}

	SetAndClearTestingConfig(-1, false);

	return ok;
}

// Serializes the cascade: the layer count on its own line, then each layer's
// own serialization, using the project-wide real-number precision.
void CCascadedAdaBoostClassifier::OutputToStream(ostream& outStream)
{
	outStream.precision(PRINT_NUM_PREC_REAL);
	outStream<<LayerAmount<<endl;

	for ( int layer = 0; layer < LayerAmount; ++layer )
		rgLayer[layer].OutputToStream(outStream);
}

// Runs cascade layers [LayerToStart, LayerToFinish) over every example in
// TargetSignedExampleList and compacts the list in place so that only
// examples accepted by all of those layers remain.
//
// Result accumulators (all updated in place):
//   bcrP_cascade            - cascade accept/reject decisions for every example;
//   bcrP_backup/bcrC_backup - running record of rejected examples (their
//                             predictions / final confidences);
//   bcrP_classify/bcrC_classify - rebuilt each call from the backups plus the
//                             surviving examples, i.e. full-classifier results
//                             by the sign of the total confidence.
// time_cost receives the wall-clock time spent; progress text goes to cout,
// summary lines to cout and oLog.
void CCascadedAdaBoostClassifier::FilterExampleList(
	CDataSetForClassification* pDataSet, CBinaryClassificationExampleList& TargetSignedExampleList,
	int LayerToStart, int LayerToFinish,
	CBinaryClassificationResult_ByPrediction& bcrP_cascade,
	CBinaryClassificationResult_ByPrediction& bcrP_backup,
	CBinaryClassificationResult_ByPrediction& bcrP_classify,
	CBinaryClassificationResult_ByConfidence& bcrC_backup,
	CBinaryClassificationResult_ByConfidence& bcrC_classify,
	double& time_cost, ostream& oLog)
{
	// Clamp the layer range to [0, LayerAmount]; an empty range is a no-op.
	if (LayerToStart < 0)
		LayerToStart = 0;
	if (LayerToStart > LayerAmount)
		LayerToStart = LayerAmount;
	if (LayerToFinish > LayerAmount)
		LayerToFinish = LayerAmount;

	if (LayerToStart >= LayerToFinish) return;

	CTimer filter_timer;
	filter_timer.Reset();
	filter_timer.Start();

	// Statistics for just this call (used only for the log summary below).
	CBinaryClassificationResult_ByPrediction bcrP_current;
	bcrP_current.Clear();

	char strTemp[10001];
	int example_amount;
	int i, j, k, o, tp;
	bool fGoOn;
	double tw, tc;

	// Shift the previously-backed-up confidences down by the AlphaSum of the
	// layers about to be applied -- this mirrors the "tc -= AlphaSum" path for
	// rejected examples below, keeping old and new confidences on one scale.
	tc = 0.0; for ( o = LayerToStart; o < LayerToFinish; ++o ) tc -= rgLayer[o].AlphaSum;
	for ( i = 0; i < (int)bcrC_backup.Pos.Size; ++i ) bcrC_backup.Pos.pElement[i] += tc;
	for ( i = 0; i < (int)bcrC_backup.Neg.Size; ++i ) bcrC_backup.Neg.pElement[i] += tc;

	// Rebuild the "classify" results starting from the rejected-example backups.
	bcrP_cascade = bcrP_backup;
	bcrP_classify = bcrP_backup;
	bcrC_classify = bcrC_backup;

	// Pos List
	example_amount = TargetSignedExampleList.PosList.ExampleAmount;
	for ( j = i = 0; i < example_amount; ++i )
	{
		CWeightedClassificationExample& t_node = TargetSignedExampleList.PosList.rgExample[i];
		tw = t_node.Weight;
		tc = 0.0; fGoOn = true;
		// Accumulate confidence layer by layer; once a layer's threshold
		// rejects the example, remaining layers only subtract their AlphaSum
		// (so tc stays comparable with fully-evaluated examples).
		for ( o = LayerToStart; o < LayerToFinish; ++o )
		{
			CAdaBoostClassifier& CurrentLayer = rgLayer[o];
			if (fGoOn)
				CurrentLayer.Classify(pDataSet, t_node.ExampleIndex, tc, tp);
			else
				tc -= CurrentLayer.AlphaSum;
			fGoOn = fGoOn && tc>CurrentLayer.CascadeThreshold;
		}

		bcrC_classify.Pos.PushBack(tc);
		if (fGoOn)
		{
			// Accepted: keep the example (compacted to position j).
			bcrP_cascade.NewTest(+1, +1, tw);
			bcrP_current.NewTest(+1, +1, tw);
			TargetSignedExampleList.PosList.rgExample[j++] = t_node;
		}
		else
		{
			// Rejected: drop it and remember it in the backups.
			bcrP_cascade.NewTest(+1, -1, tw);
			bcrP_current.NewTest(+1, -1, tw);
			bcrP_backup.NewTest(+1, -1, tw);
			bcrC_backup.Pos.PushBack(tc);
		}
		// "classify" prediction: positive only if accepted with positive confidence.
		if (tc>0.0 && fGoOn)
			bcrP_classify.NewTest(+1, +1, tw);
		else
			bcrP_classify.NewTest(+1, -1, tw);

		// Progress line, printed for ~1 in every example_amount/1000 examples.
		if (CMyRandom::NextInt(example_amount/1000) == 0)
		{
			CExternalTools::ConsoleTools::ClearCurrentLine();
			sprintf_s(strTemp, "Classifying Pos Examples %d / %d", i+1, example_amount);
			cout<<strTemp;
		}
	}
	CExternalTools::ConsoleTools::ClearCurrentLine();

	// Neg List (same procedure as above, with RealFlag -1)
	example_amount = TargetSignedExampleList.NegList.ExampleAmount;
	for ( k = i = 0; i < example_amount; ++i )
	{
		CWeightedClassificationExample& t_node = TargetSignedExampleList.NegList.rgExample[i];
		tw = t_node.Weight;
		tc = 0.0; fGoOn = true;
		for ( o = LayerToStart; o < LayerToFinish; ++o )
		{
			CAdaBoostClassifier& CurrentLayer = rgLayer[o];
			if (fGoOn)
				CurrentLayer.Classify(pDataSet, t_node.ExampleIndex, tc, tp);
			else
				tc -= CurrentLayer.AlphaSum;
			fGoOn = fGoOn && tc>CurrentLayer.CascadeThreshold;
		}

		bcrC_classify.Neg.PushBack(tc);
		if (fGoOn)
		{
			// Surviving negative: a (cascade) false positive, kept for the next layer.
			bcrP_cascade.NewTest(-1, +1, tw);
			bcrP_current.NewTest(-1, +1, tw);
			TargetSignedExampleList.NegList.rgExample[k++] = t_node;
		}
		else
		{
			bcrP_cascade.NewTest(-1, -1, tw);
			bcrP_current.NewTest(-1, -1, tw);
			bcrP_backup.NewTest(-1, -1, tw);
			bcrC_backup.Neg.PushBack(tc);
		}
		if (tc>0.0 && fGoOn)
			bcrP_classify.NewTest(-1, +1, tw);
		else
			bcrP_classify.NewTest(-1, -1, tw);

		if (CMyRandom::NextInt(example_amount/1000) == 0)
		{
			CExternalTools::ConsoleTools::ClearCurrentLine();
			sprintf_s(strTemp, "Classifying Neg Examples %d / %d", i+1, example_amount);
			cout<<strTemp;
		}
	}
	CExternalTools::ConsoleTools::ClearCurrentLine();

	// Merge
	bcrP_current.Analyse();
	bcrP_cascade.Analyse();
	bcrP_backup.Analyse(); bcrP_classify.Analyse();
	bcrC_backup.Analyse(); bcrC_classify.Analyse();
	filter_timer.Stop();
	time_cost = filter_timer.GetElapsedTime();

	// Shrink the lists to the j/k survivors and refresh the total weights.
	i = j + k;
	TargetSignedExampleList.PosList.rgExample.resize(TargetSignedExampleList.PosList.ExampleAmount = j);
	TargetSignedExampleList.NegList.rgExample.resize(TargetSignedExampleList.NegList.ExampleAmount = k);

	TargetSignedExampleList.ComputeTotalExampleWeight();

	// Log summary: survivors and this-call / whole-cascade DR and FPR.
	sprintf_s(strTemp, "[%s] Finished Classifying Data Till Layer %d;", CExternalTools::GetTimeText(time_cost), LayerToFinish);
	cout<<strTemp<<endl;
	oLog<<strTemp<<endl;
	sprintf_s(strTemp, "%9d [%6.2lf%% / %6.2lf%%] Pos Examples left;",
		j, 100.0*bcrP_current.wDetectionRate, 100.0*bcrP_cascade.wDetectionRate);
	cout<<strTemp<<endl;
	oLog<<strTemp<<endl;
	sprintf_s(strTemp, "%9d [%6.3lf%% / %6.3lf%%] Neg Examples left;",
		k, 100.0*bcrP_current.wFalsePositiveRate, 100.0*bcrP_cascade.wFalsePositiveRate);
	cout<<strTemp<<endl;
	oLog<<strTemp<<endl;
}

// Convenience overload: trains without collecting per-layer snapshots of the
// filtered example lists (forwards NULL for both optional output arrays).
void CCascadedAdaBoostClassifier::Train(
	string strFN_DstModel, int LayerToStartTraining, int LayerToStartClassification,
	CDataSetForBinaryClassification* pDataSet_Train, CBinaryClassificationExampleList& ExampleList_Train,
	CDataSetForBinaryClassification* pDataSet_Val, CBinaryClassificationExampleList& ExampleList_Val,
	CCascadedAdaBoostParametersConfig& CascadeConfig,
	ostream& oLog
	)
{
	this->Train(
		strFN_DstModel, LayerToStartTraining, LayerToStartClassification,
		pDataSet_Train, ExampleList_Train,
		pDataSet_Val, ExampleList_Val,
		CascadeConfig,
		oLog,
		NULL, NULL
		);
}

// Trains the cascade layer by layer.
//   strFN_DstModel             - if non-empty, the whole model is rewritten to
//                                this file after every trained layer;
//   LayerToStartTraining       - index of the first layer to (re)train;
//                                earlier layers are kept as-is;
//   LayerToStartClassification - first layer used when pre-filtering the
//                                example lists before the layer loop starts;
//   pFilteredExampleList_*     - optional; when non-NULL they receive a
//                                snapshot of the surviving example list after
//                                each trained layer.
// Progress and statistics are written to cout and oLog.  Temporary feature
// files are created under CascadeConfig.strPN_ForTrain and deleted again.
void CCascadedAdaBoostClassifier::Train(
	string strFN_DstModel, int LayerToStartTraining, int LayerToStartClassification,
	CDataSetForBinaryClassification* pDataSet_Train, CBinaryClassificationExampleList& ExampleList_Train,
	CDataSetForBinaryClassification* pDataSet_Val, CBinaryClassificationExampleList& ExampleList_Val,
	CCascadedAdaBoostParametersConfig& CascadeConfig,
	ostream& oLog,
	CSimpleTypeArray<CBinaryClassificationExampleList>* pFilteredExampleList_Train,
	CSimpleTypeArray<CBinaryClassificationExampleList>* pFilteredExampleList_Val
	)
{
	SetAndClearTestingConfig(-1, false);

	char strTemp[10001];
	int i;
	double tw = 1.0;
	//int j, k, tp, example_amount;
	//double tc;

	CTimer full_train_timer, layer_timer;

	// Working copies of the example lists; filtered down after every layer.
	CBinaryClassificationExampleList CurrentTrainExampleList;
	CBinaryClassificationExampleList CurrentValExampleList;

	double t_time_cost, s_time_cost;

	// Accumulated classification statistics, maintained by FilterExampleList
	// (see its header comment for the role of each accumulator).
	CBinaryClassificationResult_ByPrediction bcrP_cascade_Train, bcrP_cascade_Val;

	CBinaryClassificationResult_ByPrediction bcrP_classify_Train, bcrP_classify_Val;
	CBinaryClassificationResult_ByConfidence bcrC_classify_Train, bcrC_classify_Val;

	CBinaryClassificationResult_ByPrediction bcrP_backup_Train, bcrP_backup_Val;
	CBinaryClassificationResult_ByConfidence bcrC_backup_Train, bcrC_backup_Val;

	CAdaBoostTrainingResultForPosNegExampleList AdaBoostTrainDetails, AdaBoostValDetails;

	CAdaBoostTrainingEndCondition CurrentLayerEndCondition;
	CAdaBoostTrainingEndCondition LastLayerEndCondition;

	ExampleList_Train.ComputeTotalExampleWeight();
	ExampleList_Val.ComputeTotalExampleWeight();

	// Clamp the resume indices to the existing layer count.
	if (LayerAmount < 0)
		LayerAmount = 0;
	if (LayerToStartTraining > LayerAmount)
		LayerToStartTraining = LayerAmount;
	if (LayerToStartClassification > LayerAmount)
		LayerToStartClassification = LayerAmount;

	CurrentTrainExampleList.myInit(ExampleList_Train);
	CurrentValExampleList.myInit(ExampleList_Val);
	// Layers >= LayerToStartTraining are retrained; rgLayer keeps capacity for
	// the full cascade so layer references below stay valid.
	LayerAmount = LayerToStartTraining;
	rgLayer.resize(CascadeConfig.MaxLayerAmount);

	bcrP_cascade_Train.Clear(); bcrP_cascade_Val.Clear();

	bcrP_backup_Train.Clear(); bcrP_backup_Val.Clear();
	bcrC_backup_Train.Clear(); bcrC_backup_Val.Clear();

	bcrP_classify_Train.Clear(); bcrP_classify_Val.Clear();
	bcrC_classify_Train.Clear(); bcrC_classify_Val.Clear();

	// Resume the weak-learner total from the last kept layer, if any.
	total_trained_weak_learner_amount = 0;
	if (LayerAmount > 0)
		total_trained_weak_learner_amount = rgLayer[LayerAmount - 1].TotalTrainedWeakLearnerAmount;

	// Banner with the training setup.
	cout<<endl; oLog<<endl;
	sprintf_s(strTemp, "Start training Cascade AdaBoost Classifier:");
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	sprintf_s(strTemp, "Feature Dim = %d, Feature sample rate = %.4lf;", pDataSet_Train->GetFeatureDimension(), CascadeConfig.AdaBoostConfig.SFC_FeatureSampleRate);
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	sprintf_s(strTemp, "|Train.Pos| = %5d, |Train.Neg| = %d;", ExampleList_Train.PosList.ExampleAmount, ExampleList_Train.NegList.ExampleAmount);
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	sprintf_s(strTemp, "|  Val.Pos| = %5d, |  Val.Neg| = %d;", ExampleList_Val.PosList.ExampleAmount, ExampleList_Val.NegList.ExampleAmount);
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	sprintf_s(strTemp, "Use Validation Set For Training = [%s];",
		(CascadeConfig.AdaBoostConfig.fDetermineLayerThresholdUsingValSet ? "True" : "False"));
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	sprintf_s(strTemp, "Layer to Start Training/Classification = %d/%d;", LayerToStartTraining, LayerToStartClassification);
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	sprintf_s(strTemp, "Target Model File = [%s];", strFN_DstModel.c_str());
	cout<<strTemp<<endl; oLog<<strTemp<<endl;
	cout<<endl; oLog<<endl;

	full_train_timer.Reset();
	full_train_timer.Start();

	if (1)
	{
#if SAVE_POS_DATA
		// Positive features are extracted once to disk and served via proxy
		// data sets, so each layer does not recompute them.
		CLabeledDataSetForClassification_Proxy ProxyDataSet_Train;
		CLabeledDataSetForClassification_Proxy ProxyDataSet_Val;
		CLabeledDataSetForClassification_ByExample PosDataSet_Train;
		CLabeledDataSetForClassification_ByExample PosDataSet_Val;

		layer_timer.Reset();
		if (1)
		{
			// Dump Train/Val positive features to their cache files.
			layer_timer.Start();
			sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_PosFeatureData_Train.c_str());

			CLabeledDataSetForClassification_ByExample::GenerateFeatureFile(
				strTemp,
				pDataSet_Train->pRealDataSet, CurrentTrainExampleList.PosList, true,
				CascadeConfig.FeatureMemoryUsage_Pos_Train + CascadeConfig.FeatureMemoryUsage_Pos_Val);

			layer_timer.Stop();
			sprintf_s(strTemp, "[%s] Finished generating Train Pos feature;", CExternalTools::GetTimeText(layer_timer.GetElapsedTime()));
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;

			layer_timer.Start();
			sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_PosFeatureData_Val.c_str());
			CLabeledDataSetForClassification_ByExample::GenerateFeatureFile(
				strTemp,
				pDataSet_Val->pRealDataSet, CurrentValExampleList.PosList, true,
				CascadeConfig.FeatureMemoryUsage_Pos_Train + CascadeConfig.FeatureMemoryUsage_Pos_Val);

			layer_timer.Stop();
			sprintf_s(strTemp, "[%s] Finished generating Val   Pos feature;", CExternalTools::GetTimeText(layer_timer.GetElapsedTime()));
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;

			//cout<<endl;
			//oLog<<endl;
		}

		if (1)
		{
			// Open the cached positive features and map each positive example
			// index onto its cached row via the proxy data sets.
			sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_PosFeatureData_Train.c_str());
			PosDataSet_Train.myInit(strTemp, CascadeConfig.FeatureMemoryUsage_Pos_Train);
			ProxyDataSet_Train.InitSourceAndProxy(pDataSet_Train->pRealDataSet, &PosDataSet_Train);
			for ( i = 0; i < CurrentTrainExampleList.PosList.ExampleAmount; ++i )
				ProxyDataSet_Train.SetProxy(CurrentTrainExampleList.PosList.rgExample[i].ExampleIndex, i);

			sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_PosFeatureData_Val.c_str());
			PosDataSet_Val.myInit(strTemp, CascadeConfig.FeatureMemoryUsage_Pos_Val);
			ProxyDataSet_Val.InitSourceAndProxy(pDataSet_Val->pRealDataSet, &PosDataSet_Val);
			for ( i = 0; i < CurrentValExampleList.PosList.ExampleAmount; ++i )
				ProxyDataSet_Val.SetProxy(CurrentValExampleList.PosList.rgExample[i].ExampleIndex, i);
		}
#else
#endif

		// Filtering First Train Example List
		// (pre-filters through already-existing layers when resuming)
		sprintf_s(strTemp, "Start Generating Train Example List...");
		cout<<strTemp<<endl;
		oLog<<strTemp<<endl;
#if SAVE_POS_DATA
		this->FilterExampleList(
			&ProxyDataSet_Train, CurrentTrainExampleList,
			LayerToStartClassification, LayerAmount,
			bcrP_cascade_Train,
			bcrP_backup_Train, bcrP_classify_Train,
			bcrC_backup_Train, bcrC_classify_Train,
			t_time_cost, oLog);
#else
		this->FilterExampleList(
			pDataSet_Train, CurrentTrainExampleList,
			LayerToStartClassification, LayerAmount,
			bcrP_cascade_Train,
			bcrP_backup_Train, bcrP_classify_Train,
			bcrC_backup_Train, bcrC_classify_Train,
			t_time_cost, oLog);
#endif

		// Filtering First Val Example List
		sprintf_s(strTemp, "Start Generating Val Example List...");
		cout<<strTemp<<endl;
		oLog<<strTemp<<endl;
#if SAVE_POS_DATA
		this->FilterExampleList(
			&ProxyDataSet_Val, CurrentValExampleList,
			LayerToStartClassification, LayerAmount,
			bcrP_cascade_Val,
			bcrP_backup_Val, bcrP_classify_Val,
			bcrC_backup_Val, bcrC_classify_Val,
			t_time_cost, oLog);
#else
		this->FilterExampleList(
			pDataSet_Val, CurrentValExampleList,
			LayerToStartClassification, LayerAmount,
			bcrP_cascade_Val,
			bcrP_backup_Val, bcrP_classify_Val,
			bcrC_backup_Val, bcrC_classify_Val,
			t_time_cost, oLog);
#endif

		cout<<endl;
		oLog<<endl;

		if (pFilteredExampleList_Train != NULL) pFilteredExampleList_Train->Clear();
		if (pFilteredExampleList_Val != NULL) pFilteredExampleList_Val->Clear();

		// === Per-layer training loop ===
		LastLayerEndCondition = CascadeConfig.DefaultLayerTrainingCondition;
		while (LayerAmount < CascadeConfig.MaxLayerAmount)
		{
			// Stop when either class runs out of (enough) surviving examples.
			if (CurrentTrainExampleList.PosList.ExampleAmount < 1) break;
			if (CascadeConfig.MinTrainPosExampleAmount>0 && CurrentTrainExampleList.PosList.ExampleAmount<CascadeConfig.MinTrainPosExampleAmount) break;
			if (CurrentTrainExampleList.NegList.ExampleAmount < 1) break;
			if (CascadeConfig.MinTrainNegExampleAmount>0 && CurrentTrainExampleList.NegList.ExampleAmount<CascadeConfig.MinTrainNegExampleAmount) break;

			// Claim the next layer slot (LayerAmount is 1-based in the logs below).
			CAdaBoostClassifier& CurrentLayer = rgLayer[LayerAmount++];
			layer_timer.Reset();

			sprintf_s(strTemp, "Start training cascade layer %d...", LayerAmount);
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;

			// Pick this layer's end condition: explicit entry if configured,
			// otherwise the last explicit (or default) condition.
			if (LayerAmount <= (int)CascadeConfig.PerLayerTrainingEndCondition.Size)
			{
				CurrentLayerEndCondition = CascadeConfig.PerLayerTrainingEndCondition.pElement[LayerAmount-1];
				LastLayerEndCondition = CurrentLayerEndCondition;
			}
			else
				CurrentLayerEndCondition = LastLayerEndCondition;

			sprintf_s(strTemp, "Target: MaxWcAmount = %d, DR / FPR = %.3lf / %.5lf",
				CurrentLayerEndCondition.MaxWcAmount, CurrentLayerEndCondition.TrainDR, CurrentLayerEndCondition.TrainFPR);
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;

			// --- Sample this layer's training/validation data and write the
			// --- temporary feature files the AdaBoost trainer reads.
			CBinaryClassificationExampleList SampledCurrentTrainExampleList;
			CBinaryClassificationExampleList SampledCurrentValExampleList;
			if (1)
			{
				CBinaryClassificationExampleList TempExampleList;
				TempExampleList = CurrentTrainExampleList;
#if USE_FULL_POS_DATA_EACH_LAYER
				// Every layer sees the full positive set, not just survivors.
				TempExampleList.PosList = ExampleList_Train.PosList;
#endif
				// Subsample both classes down to the configured maximums.
				double PosSampleRate = 1.0 * CascadeConfig.MaxTrainPosExampleAmount / TempExampleList.PosList.ExampleAmount;
				if (PosSampleRate < EPS) PosSampleRate = 1.0;
				if (PosSampleRate > 1.0) PosSampleRate = 1.0;
				double NegSampleRate = 1.0 * CascadeConfig.MaxTrainNegExampleAmount / TempExampleList.NegList.ExampleAmount;
				if (NegSampleRate < EPS) NegSampleRate = 1.0;
				if (NegSampleRate > 1.0) NegSampleRate = 1.0;
				CWeightedClassificationExampleList TempMixedExampleList;
				CSimpleTypeArray<int> RFI;

				// Sample Train Data
				SampledCurrentTrainExampleList.myInit(TempExampleList, PosSampleRate, NegSampleRate);
				//tw = TempExampleList.PosList.TotalExampleWeight / SampledCurrentTrainExampleList.PosList.TotalExampleWeight;
				//for ( i = 0; i < SampledCurrentTrainExampleList.PosList.ExampleAmount; ++i ) SampledCurrentTrainExampleList.PosList.rgExample[i].Weight *= tw;
				//tw = TempExampleList.NegList.TotalExampleWeight / SampledCurrentTrainExampleList.NegList.TotalExampleWeight;
				//for ( i = 0; i < SampledCurrentTrainExampleList.NegList.ExampleAmount; ++i ) SampledCurrentTrainExampleList.NegList.rgExample[i].Weight *= tw;
#if USE_EQUAL_POS_NEG_DATA_WEIGHT
				SampledCurrentTrainExampleList.ComputeTotalExampleWeight();
				if (CurrentLayerEndCondition.TrainFPR > -EPS)
				{
					tw = CurrentLayerEndCondition.TrainFPR / (1.0 - CurrentLayerEndCondition.TrainDR);
					tw *= SampledCurrentTrainExampleList.NegList.TotalExampleWeight / SampledCurrentTrainExampleList.PosList.TotalExampleWeight;
					for ( i = 0; i < SampledCurrentTrainExampleList.PosList.ExampleAmount; ++i ) SampledCurrentTrainExampleList.PosList.rgExample[i].Weight *= tw;
				}
				//tw = SampledCurrentTrainExampleList.NegList.TotalExampleWeight / SampledCurrentTrainExampleList.PosList.TotalExampleWeight;
				//for ( i = 0; i < SampledCurrentTrainExampleList.PosList.ExampleAmount; ++i ) SampledCurrentTrainExampleList.PosList.rgExample[i].Weight *= tw;
#endif
				SampledCurrentTrainExampleList.NormalizeExampleWeight();
				layer_timer.Start();
				SampledCurrentTrainExampleList.MakeMixedList(TempMixedExampleList);

#if SAVE_POS_DATA
				CLabeledDataSetForClassification_ByGroupedFeature::GenerateRandomFeatureIndex(RFI, &ProxyDataSet_Train); // pDataSet_Train);
				sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Train.c_str());
				CLabeledDataSetForClassification_ByGroupedFeature::GenerateFeatureFile(
					strTemp,
					&ProxyDataSet_Train, RFI, TempMixedExampleList, true,
					CascadeConfig.FeatureMemoryUsage_Train + CascadeConfig.FeatureMemoryUsage_Val);
#else
				CLabeledDataSetForClassification_ByGroupedFeature::GenerateRandomFeatureIndex(RFI, pDataSet_Train);
				sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Train.c_str());
				CLabeledDataSetForClassification_ByGroupedFeature::GenerateFeatureFile(
					strTemp,
					pDataSet_Train->pRealDataSet, RFI, TempMixedExampleList,
					CascadeConfig.FeatureMemoryUsage_Train + CascadeConfig.FeatureMemoryUsage_Val);
#endif
				layer_timer.Stop();
				sprintf_s(strTemp, "[%s] Finished making [Train] feature, %d/%d examples sampled;",
					CExternalTools::GetTimeText(layer_timer.GetElapsedTime()),
					SampledCurrentTrainExampleList.PosList.ExampleAmount, SampledCurrentTrainExampleList.NegList.ExampleAmount);
				cout<<strTemp<<endl;
				oLog<<strTemp<<endl;

				TempExampleList = CurrentValExampleList;
#if USE_FULL_POS_DATA_EACH_LAYER
				TempExampleList.PosList = ExampleList_Val.PosList;
#endif
				// Sample Val Data
				SampledCurrentValExampleList.myInit(TempExampleList, PosSampleRate, NegSampleRate);
				//tw = TempExampleList.PosList.TotalExampleWeight / SampledCurrentValExampleList.PosList.TotalExampleWeight;
				//for ( i = 0; i < SampledCurrentValExampleList.PosList.ExampleAmount; ++i ) SampledCurrentValExampleList.PosList.rgExample[i].Weight *= tw;
				//tw = TempExampleList.NegList.TotalExampleWeight / SampledCurrentValExampleList.NegList.TotalExampleWeight;
				//for ( i = 0; i < SampledCurrentValExampleList.NegList.ExampleAmount; ++i ) SampledCurrentValExampleList.NegList.rgExample[i].Weight *= tw;
#if USE_EQUAL_POS_NEG_DATA_WEIGHT
				SampledCurrentValExampleList.ComputeTotalExampleWeight();
				if (CurrentLayerEndCondition.TrainFPR > -EPS)
				{
					tw = CurrentLayerEndCondition.TrainFPR / (1.0 - CurrentLayerEndCondition.TrainDR);
					tw *= SampledCurrentValExampleList.NegList.TotalExampleWeight / SampledCurrentValExampleList.PosList.TotalExampleWeight;
					for ( i = 0; i < SampledCurrentValExampleList.PosList.ExampleAmount; ++i ) SampledCurrentValExampleList.PosList.rgExample[i].Weight *= tw;
				}
				//tw = SampledCurrentValExampleList.NegList.TotalExampleWeight / SampledCurrentValExampleList.PosList.TotalExampleWeight;
				//for ( i = 0; i < SampledCurrentValExampleList.PosList.ExampleAmount; ++i ) SampledCurrentValExampleList.PosList.rgExample[i].Weight *= tw;
#endif
				SampledCurrentValExampleList.NormalizeExampleWeight();
				layer_timer.Start();
				SampledCurrentValExampleList.MakeMixedList(TempMixedExampleList);

#if SAVE_POS_DATA
				sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Val.c_str());
				CLabeledDataSetForClassification_ByFeature::GenerateFeatureFile(
					strTemp,
					&ProxyDataSet_Val, TempMixedExampleList, true,
					CascadeConfig.FeatureMemoryUsage_Train + CascadeConfig.FeatureMemoryUsage_Val);
#else
				sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Val.c_str());
				CLabeledDataSetForClassification_ByFeature::GenerateFeatureFile(
					strTemp,
					pDataSet_Val->pRealDataSet, TempMixedExampleList,
					CascadeConfig.FeatureMemoryUsage_Train + CascadeConfig.FeatureMemoryUsage_Val);
#endif
				layer_timer.Stop();
				sprintf_s(strTemp, "[%s] Finished making [Val  ] feature, %d/%d examples sampled;",
					CExternalTools::GetTimeText(layer_timer.GetElapsedTime()),
					SampledCurrentValExampleList.PosList.ExampleAmount, SampledCurrentValExampleList.NegList.ExampleAmount);
				cout<<strTemp<<endl;
				oLog<<strTemp<<endl;
			}

			// --- Train this layer's AdaBoost classifier from the temp files.
			if (1)
			{
				CLabeledDataSetForClassification_ByGroupedFeature Real_ds_Train;
				CLabeledDataSetForClassification_ByFeature Real_ds_Val;
				CDataSetForBinaryClassification bds_Train;
				CDataSetForBinaryClassification bds_Val;
				CBinaryClassificationExampleList ForTrainCurrentTrainExampleList;
				CBinaryClassificationExampleList ForTrainCurrentValExampleList;

				sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Train.c_str());
				Real_ds_Train.myInit(strTemp, CascadeConfig.TrainFeatureGroupAmount, CascadeConfig.FeatureMemoryUsage_Train);
				bds_Train.SetRealDataSet(&Real_ds_Train);
				bds_Train.CopyLabelSettingFrom(*pDataSet_Train);
				ForTrainCurrentTrainExampleList.myInit(&bds_Train);

				sprintf_s(strTemp, "%s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Val.c_str());
				Real_ds_Val.myInit(strTemp, CascadeConfig.FeatureMemoryUsage_Val);
				bds_Val.SetRealDataSet(&Real_ds_Val);
				bds_Val.CopyLabelSettingFrom(*pDataSet_Val);
				ForTrainCurrentValExampleList.myInit(&bds_Val);

				bds_Train.InitValidFeatures();

				layer_timer.Start();
				CurrentLayer.TotalTrainedWeakLearnerAmount = total_trained_weak_learner_amount;
				CurrentLayer.Train(
					&bds_Train, ForTrainCurrentTrainExampleList,
					&bds_Val, ForTrainCurrentValExampleList,
					CascadeConfig.AdaBoostConfig, CurrentLayerEndCondition,
					AdaBoostTrainDetails, AdaBoostValDetails,
					oLog
					);
				total_trained_weak_learner_amount = CurrentLayer.TotalTrainedWeakLearnerAmount;
				layer_timer.Stop();

				// Checkpoint the whole model after every layer.
				if (strFN_DstModel != "")
				{
					ofstream outFile_Model(strFN_DstModel.c_str());
					this->OutputToStream(outFile_Model);
					outFile_Model.clear();
					outFile_Model.close();
				}

				sprintf_s(strTemp, "[%s] Finished training AdaBoost classifier for layer %d;",
					CExternalTools::GetTimeText(layer_timer.GetElapsedTime()), LayerAmount);
				cout<<strTemp<<endl;
				oLog<<strTemp<<endl;
			}

			// Remove this layer's temporary feature files.
			if (1)
			{
				sprintf_s(strTemp, "del %s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Train.c_str());
				CExternalTools::ConsoleTools::RunDosCommand("", strTemp);
				sprintf_s(strTemp, "del %s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_TempFeatureData_Val.c_str());
				CExternalTools::ConsoleTools::RunDosCommand("", strTemp);
			}

			s_time_cost = 0.0;

			// Filtering Train Set
			// (runs only the just-trained layer, LayerAmount-1, over the survivors)
			sprintf_s(strTemp, "Start Filtering Train Example List...");
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;
#if SAVE_POS_DATA
			this->FilterExampleList(
				&ProxyDataSet_Train, CurrentTrainExampleList,
				LayerAmount - 1, LayerAmount,
				bcrP_cascade_Train,
				bcrP_backup_Train, bcrP_classify_Train,
				bcrC_backup_Train, bcrC_classify_Train,
				t_time_cost, oLog);
#else
			this->FilterExampleList(
				pDataSet_Train, CurrentTrainExampleList,
				LayerAmount - 1, LayerAmount,
				bcrP_cascade_Train,
				bcrP_backup_Train, bcrP_classify_Train,
				bcrC_backup_Train, bcrC_classify_Train,
				t_time_cost, oLog);
#endif
			if (pFilteredExampleList_Train != NULL) pFilteredExampleList_Train->PushBack(CurrentTrainExampleList);

			// Dump the per-layer Train ROC curve.
			s_time_cost += t_time_cost;
			sprintf_s(strTemp, "%straining.layer.%d.roc.Train.txt", CascadeConfig.strPN_ForTrain.c_str(), LayerAmount);
			ofstream outFile_TempROC_Train(strTemp);
			bcrC_classify_Train.OutputROCtoStream(outFile_TempROC_Train,
				CascadeConfig.OutputROC_SamplingCount,
				CascadeConfig.OutputROC_MinFPR,
				CascadeConfig.OutputROC_MaxFPR,
				CascadeConfig.OutputROC_fUseLogScale);
			outFile_TempROC_Train.clear();
			outFile_TempROC_Train.close();

			// Filtering Val Set
			sprintf_s(strTemp, "Start Filtering Val Example List...");
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;
#if SAVE_POS_DATA
			this->FilterExampleList(
				&ProxyDataSet_Val, CurrentValExampleList,
				LayerAmount - 1, LayerAmount,
				bcrP_cascade_Val,
				bcrP_backup_Val, bcrP_classify_Val,
				bcrC_backup_Val, bcrC_classify_Val,
				t_time_cost, oLog);
#else
			this->FilterExampleList(
				pDataSet_Val, CurrentValExampleList,
				LayerAmount - 1, LayerAmount,
				bcrP_cascade_Val,
				bcrP_backup_Val, bcrP_classify_Val,
				bcrC_backup_Val, bcrC_classify_Val,
				t_time_cost, oLog);
#endif
			if (pFilteredExampleList_Val != NULL) pFilteredExampleList_Val->PushBack(CurrentValExampleList);

			// Dump the per-layer Val ROC curve.
			s_time_cost += t_time_cost;
			sprintf_s(strTemp, "%straining.layer.%d.roc.Val.txt", CascadeConfig.strPN_ForTrain.c_str(), LayerAmount);
			ofstream outFile_TempROC_Val(strTemp);
			bcrC_classify_Val.OutputROCtoStream(outFile_TempROC_Val,
				CascadeConfig.OutputROC_SamplingCount,
				CascadeConfig.OutputROC_MinFPR,
				CascadeConfig.OutputROC_MaxFPR,
				CascadeConfig.OutputROC_fUseLogScale);
			outFile_TempROC_Val.clear();
			outFile_TempROC_Val.close();

			// Per-layer summary to console and log.
			cout<<":: Layer "<<setw(3)<<LayerAmount<<", #.WC = "<<setw(4)<<CurrentLayer.WcAmount<<";"<<endl;
			cout<<"   Train ";
			bcrP_classify_Train.OutputToStream_w(cout);
			cout<<"   Val   ";
			bcrP_classify_Val.OutputToStream_w(cout);

			oLog<<":: Layer "<<setw(3)<<LayerAmount<<", #.WC = "<<setw(4)<<CurrentLayer.WcAmount<<";"<<endl;
			oLog<<"   Train ";
			bcrP_classify_Train.OutputToStream_w(oLog);
			oLog<<"   Val   ";
			bcrP_classify_Val.OutputToStream_w(oLog);

			s_time_cost += layer_timer.GetCumulativeTime();
			sprintf_s(strTemp, "Layer %d done, taking %s, total weak learner amount = %d;", LayerAmount, CExternalTools::GetTimeText(s_time_cost), total_trained_weak_learner_amount);
			cout<<strTemp<<endl;
			oLog<<strTemp<<endl;

			cout<<endl;
			oLog<<endl;
		}
	}
#if SAVE_POS_DATA
	// Remove the cached positive feature files.
	if (1)
	{
		sprintf_s(strTemp, "del %s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_PosFeatureData_Train.c_str());
		CExternalTools::ConsoleTools::RunDosCommand("", strTemp);
		sprintf_s(strTemp, "del %s%s", CascadeConfig.strPN_ForTrain.c_str(), CascadeConfig.strFN_PosFeatureData_Val.c_str());
		CExternalTools::ConsoleTools::RunDosCommand("", strTemp);
	}
#else
#endif

	full_train_timer.Stop();

	cout<<endl;
	cout.precision(2);
	cout<<"Finished, the full training took "<<CExternalTools::GetTimeText(full_train_timer.GetCumulativeTime())<<";"<<endl;

	oLog.precision(2);
	oLog<<"Finished, the full training took "<<CExternalTools::GetTimeText(full_train_timer.GetCumulativeTime())<<";"<<endl;

	// Shrink rgLayer back to the layers actually trained.
	rgLayer.resize(LayerAmount);

	oLog<<endl;
}


// Convenience overload: trains the cascade from a clean slate on the given
// train/validation data sets using the supplied configuration.
// Delegates to the primary Train() overload; the leading ("", 0, 0) arguments
// presumably mean "no resume file, zero pre-trained layers / weak learners" —
// confirm against the primary overload's declaration.
void CCascadedAdaBoostClassifier::Train(
	CDataSetForBinaryClassification* pDataSet_Train, CBinaryClassificationExampleList& ExampleList_Train,
	CDataSetForBinaryClassification* pDataSet_Val, CBinaryClassificationExampleList& ExampleList_Val,
	CCascadedAdaBoostParametersConfig& CascadeConfig, 
	ostream& oLog
	)
{
	this->Train("", 0, 0, pDataSet_Train, ExampleList_Train, pDataSet_Val, ExampleList_Val, CascadeConfig, oLog);
}

double CCascadedAdaBoostClassifier::GetMinConfidenceBound(int First_X_Layers) const
{
	if (First_X_Layers<0 || First_X_Layers>LayerAmount) First_X_Layers = LayerAmount;
	if (First_X_Layers==0) return log(0.5);
	double ret = 0.0;
	int i;
	for ( i = 0; i < First_X_Layers; ++i )
		ret -= log(1.0 + exp(+rgLayer[i].AlphaSum));
	ret /= First_X_Layers;
	return ret;
}

double CCascadedAdaBoostClassifier::GetMaxConfidenceBound(int First_X_Layers) const
{
	if (First_X_Layers<0 || First_X_Layers>LayerAmount) First_X_Layers = LayerAmount;
	if (First_X_Layers==0) return log(0.5);
	double ret = 0.0;
	int i;
	for ( i = 0; i < First_X_Layers; ++i )
		ret -= log(1.0 + exp(-rgLayer[i].AlphaSum));
	ret /= First_X_Layers;
	return ret;
}

// Records how many cascade layers _Classify() should evaluate.
// NOTE(review): fSaveClassifyingHistory is currently ignored — despite the
// "AndClear" in the method name, no history state is touched here. Confirm
// whether clearing/saving classification history was meant to be implemented.
void CCascadedAdaBoostClassifier::SetAndClearTestingConfig(int TestLayerAmount, bool fSaveClassifyingHistory)
{
	this->TestLayerAmount = TestLayerAmount;
}

// Classifies a single example using the configured TestLayerAmount, returning
// only the confidence and prediction; the exit-layer output of the full
// Classify() overload is discarded.
void CCascadedAdaBoostClassifier::_Classify(CDataSetForClassification* pDataSet, int ExampleIndex, double& DstConfidence, int& DstPrediction)
{
	int ignoredExitLayer = 0;
	Classify(pDataSet, ExampleIndex, TestLayerAmount, DstConfidence, DstPrediction, ignoredExitLayer);
}

// Runs one example through the first paraTestLayerAmount cascade layers.
//
// pDataSet / ExampleIndex   the example to classify.
// paraTestLayerAmount       how many layers to evaluate; negative or larger
//                           than LayerAmount means "all layers"; zero returns
//                           the uninformative confidence log(0.5) with a
//                           positive prediction.
// DstConfidence (out)       average per-layer log-sigmoid confidence; layers
//                           skipped after an early rejection contribute their
//                           worst-case score (-AlphaSum).
// DstPrediction (out)       +1 if every evaluated layer accepts, -1 on an
//                           early cascade rejection.
// resExitLayer (out)        index of the layer that rejected the example, or
//                           paraTestLayerAmount if all layers accepted.
void CCascadedAdaBoostClassifier::Classify(
	CDataSetForClassification* pDataSet, int ExampleIndex,
	int paraTestLayerAmount,
	double& DstConfidence, int& DstPrediction, int& resExitLayer
	)
{
	// Clamp the requested depth into [0, LayerAmount].
	if (paraTestLayerAmount < 0 || paraTestLayerAmount > LayerAmount)
		paraTestLayerAmount = LayerAmount;
	if (paraTestLayerAmount == 0) { DstConfidence = log(0.5); DstPrediction = +1; resExitLayer = 0; return; }

	double tc;
	double np = 0.0;
	int i;
	for ( i = 0; i < paraTestLayerAmount; ++i )
	{
		rgLayer[i].Classify(pDataSet, ExampleIndex, tc, DstPrediction);
		np -= log(1.0 + exp(-tc));				// log-sigmoid of the layer's raw score
		if (!(tc > rgLayer[i].CascadeThreshold))
			break;								// rejected by layer i: stop the cascade
	}

	resExitLayer = i;
	if (i < paraTestLayerAmount) DstPrediction = -1;	// early exit => negative prediction
	// Layers never reached contribute their worst-case confidence (raw score -AlphaSum).
	for ( ++i; i < paraTestLayerAmount; ++i )
		np -= log(1.0 + exp(rgLayer[i].AlphaSum));
	DstConfidence = np / paraTestLayerAmount;
}

// Runs one example through the cascade and records each layer's raw
// confidence in DstConfidenceList. On an early rejection, ExitLayer is set to
// the rejecting layer's index and every unreached layer contributes its
// worst-case score (-AlphaSum); otherwise ExitLayer == paraTestLayerAmount.
void CCascadedAdaBoostClassifier::Classify(
	CDataSetForClassification* pDataSet, int ExampleIndex,
	int paraTestLayerAmount, int& ExitLayer,
	CSimpleTypeArray<double>& DstConfidenceList)
{
	// Negative or oversized requests mean "evaluate every layer".
	if (paraTestLayerAmount < 0 || paraTestLayerAmount > LayerAmount)
		paraTestLayerAmount = LayerAmount;

	DstConfidenceList.Clear();
	ExitLayer = paraTestLayerAmount;

	int layer = 0;
	while (layer < paraTestLayerAmount)
	{
		double layerConfidence = ((CBinaryClassifier*)(&rgLayer[layer]))->Classify(pDataSet, ExampleIndex);
		DstConfidenceList.PushBack(layerConfidence);
		++layer;
		if (!(layerConfidence > rgLayer[layer - 1].CascadeThreshold))
		{
			ExitLayer = layer - 1;		// rejected here; stop evaluating
			break;
		}
	}
	// Record worst-case scores for the layers never reached.
	for ( ; layer < paraTestLayerAmount; ++layer )
		DstConfidenceList.PushBack(-rgLayer[layer].AlphaSum);
}

// Returns the total number of weak learners in the first CountingLayerAmount
// cascade layers. A negative or out-of-range argument counts every layer.
int CCascadedAdaBoostClassifier::GetTotalWeakLearnerAmount(int CountingLayerAmount) const
{
	if (CountingLayerAmount < 0 || CountingLayerAmount > LayerAmount)
		CountingLayerAmount = LayerAmount;
	int ret = 0;
	// BUG FIX: the loop previously iterated over all LayerAmount layers,
	// silently ignoring the clamped CountingLayerAmount parameter.
	for (int i = 0; i < CountingLayerAmount; ++i)
		ret += rgLayer[i].WcAmount;
	return ret;
}

