
#include "cl_nb_NaiveBayes.h"

// Construct an empty (untrained) single-class model; all members are
// reset through Clear().
CSingleClassNaiveBayesModel::CSingleClassNaiveBayesModel()
{
	Clear();
}

// Reset every member of the model to its pristine, untrained state.
void CSingleClassNaiveBayesModel::Clear()
{
	P_c = 0.0;
	logP_c = 0.0;
	FeatureDimension = 0;
	TotalTrainExampleWeightSum = 0.0;
	TotalTrainExampleFeatureValueSum = 0.0;
	vecF_FVS_f_given_c.DeepClear();
	vecF_logP_f_given_c.DeepClear();
}

// Prepare the model for training on one class label.
// Every per-feature value sum starts at SmoothEpsilon (additive smoothing),
// so the running total starts at SmoothEpsilon * FeatureDimension.
void CSingleClassNaiveBayesModel::myInit(
	int Label, int FeatureDimension, double SmoothEpsilon )
{
	this->Label = Label;
	this->FeatureDimension = FeatureDimension;

	vecF_FVS_f_given_c.Resize(FeatureDimension);
	vecF_FVS_f_given_c.Fill(SmoothEpsilon);

	vecF_logP_f_given_c.Resize(FeatureDimension);
	vecF_logP_f_given_c.Fill(0.0);

	TotalTrainExampleWeightSum = 0.0;
	TotalTrainExampleFeatureValueSum = SmoothEpsilon * FeatureDimension;

	P_c = 0.0;
	logP_c = 0.0;
}

// Accumulate one weighted training example into the per-feature value sums
// and into the example/feature-value weight totals.
// Returns the example's weighted feature-value sum.
double CSingleClassNaiveBayesModel::NewTrainExample(
	CLabeledDataSetForClassification* pSrcLDS,
	int ExampleIndex, double ExampleWeight )
{
	CSparseVector sv;
	pSrcLDS->MakeSparseVector_ByExample(sv, ExampleIndex);

	const int* pIndex = sv.IndexList.pElement;
	const double* pValue = sv.ValueList.pElement;
	double* pFeatureSum = vecF_FVS_f_given_c.pElement;

	double exampleSum = 0.0;
	for ( int k = 0; k < sv.Length; ++k )
	{
		const double weighted = pValue[k] * ExampleWeight;
		pFeatureSum[pIndex[k]] += weighted;
		exampleSum += weighted;
	}

	TotalTrainExampleWeightSum += ExampleWeight;
	TotalTrainExampleFeatureValueSum += exampleSum;
	return exampleSum;
}

// Convert the accumulated sums into log-probabilities:
//   logP_c              — log class prior (0.0 when the prior is ignored)
//   vecF_logP_f_given_c — log of each smoothed feature likelihood
// Does nothing when the all-class weight sum is effectively zero.
void CSingleClassNaiveBayesModel::FinishTraining(
	double AllClassTotalTrainExampleWeightSum,
	bool fIgnorePriorDistribution )
{
	if (AllClassTotalTrainExampleWeightSum < MINIMUM_DOUBLE_EPS_C)
		return;

	P_c = TotalTrainExampleWeightSum / AllClassTotalTrainExampleWeightSum;
	logP_c = fIgnorePriorDistribution ? 0.0 : CMyMath::my_log(P_c);

	const double* pSum = vecF_FVS_f_given_c.pElement;
	double* pLog = vecF_logP_f_given_c.pElement;
	for ( int i = 0; i < FeatureDimension; ++i )
		pLog[i] = CMyMath::my_log(pSum[i] / TotalTrainExampleFeatureValueSum);
}

// Score a sparse feature vector against this class:
//   logP(c) + sum_k value_k * logP(feature_k | c)
double CSingleClassNaiveBayesModel::Classify( 
	const CSparseVector& FeatureVector ) const
{
	const int* pIndex = FeatureVector.IndexList.pElement;
	const double* pValue = FeatureVector.ValueList.pElement;
	const double* pLog = vecF_logP_f_given_c.pElement;

	double score = logP_c;
	for ( int k = 0; k < FeatureVector.Length; ++k )
		score += pValue[k] * pLog[pIndex[k]];

	return score;
}

// Convenience overload: build the example's sparse vector, then score it.
double CSingleClassNaiveBayesModel::Classify( 
	CDataSetForClassification* pSrcLDS,
	int ExampleIndex ) const
{
	CSparseVector sv;
	pSrcLDS->MakeSparseVector_ByExample(sv, ExampleIndex);
	return Classify(sv);
}

// [*] CMultiClassNaiveBayesModel

// Construct an empty multi-class model; all members are reset via Clear().
CMultiClassNaiveBayesModel::CMultiClassNaiveBayesModel()
{
	this->Clear();
}

// Drop every per-class model and reset the label-to-model mapping.
void CMultiClassNaiveBayesModel::Clear()
{
	lbl2mdl.clear();
	ListNBM.DeepClear();
	AmountClass = 0;
	FeatureDimension = 0;
	TotalExampleWeightSum = 0.0;
}

// Train one CSingleClassNaiveBayesModel per distinct label appearing in the
// weighted training-example list, then finalize all of them.
//
// @param pSrcLDS                   labeled data set providing labels and
//                                  sparse feature vectors per example
// @param TrainingExampleList       (example index, weight) pairs to train on
// @param fIgnorePriorDistribution  when true, class priors contribute 0 to
//                                  the classification score
//
// SmoothEpsilon is a data-dependent additive smoothing term: 1/10000 of the
// total training weight, seeded into every feature's value sum by myInit().
void CMultiClassNaiveBayesModel::Train(
	CLabeledDataSetForClassification* pSrcLDS,
	const CWeightedClassificationExampleList& TrainingExampleList,
	bool fIgnorePriorDistribution )
{
	Clear();
	FeatureDimension = pSrcLDS->GetFeatureDimension();
	TotalExampleWeightSum = 0.0;

	int i_ei, i_example, ModelIndex;
	int tLabel;
	double tExampleWeight;
	CLabelMapping::const_iterator itr;

	double SmoothEpsilon = TrainingExampleList.TotalExampleWeight / 10000.0;
	for ( i_ei = 0; i_ei < TrainingExampleList.ExampleAmount; ++i_ei )
	{
		i_example = TrainingExampleList.rgExample[i_ei].ExampleIndex;
		tExampleWeight = TrainingExampleList.rgExample[i_ei].Weight;
		TotalExampleWeightSum += tExampleWeight;

		tLabel = pSrcLDS->GetExampleLabel(i_example);
		itr = lbl2mdl.find(tLabel);
		if (itr != lbl2mdl.end())
		{
			ModelIndex = itr->second;
		}
		else
		{
			// First time this label appears: register it and initialize a
			// fresh per-class model. (One lookup instead of the former
			// find / insert / find-again sequence.)
			ModelIndex = AmountClass++;
			lbl2mdl[tLabel] = ModelIndex;
			ListNBM.Resize(AmountClass);
			ListNBM.pElement[ModelIndex].myInit(
				tLabel, FeatureDimension, SmoothEpsilon);
		}
		ListNBM.pElement[ModelIndex].NewTrainExample(
			pSrcLDS, i_example, tExampleWeight);
	}

	// Compute log-probabilities for every class model.
	ToggleWhetherIgnorePriorDistribution(fIgnorePriorDistribution);
}

// Re-finalize every per-class model, switching whether the class prior
// contributes to the classification score.
void CMultiClassNaiveBayesModel::ToggleWhetherIgnorePriorDistribution( bool fIgnorePriorDistribution )
{
	for ( int i = 0; i < AmountClass; ++i )
	{
		ListNBM.pElement[i].FinishTraining(
			TotalExampleWeightSum, fIgnorePriorDistribution);
	}
}

// Convenience overload: build the example's sparse vector, then classify it.
int CMultiClassNaiveBayesModel::Classify(
	CDataSetForClassification* pSrcLDS, int ExampleIndex,
	int& DstLabel, double& DstConfidence ) const
{
	CSparseVector sv;
	pSrcLDS->MakeSparseVector_ByExample(sv, ExampleIndex);
	return Classify(sv, DstLabel, DstConfidence);
}

int CMultiClassNaiveBayesModel::Classify(
	const CSparseVector& FeatureVector,
	int& DstLabel, double& DstConfidence ) const
{
	int ret = -1;
	DstLabel = 0; DstConfidence = 0.0;

	int i_class;
	double tC;
	for ( i_class = 0; i_class < AmountClass; ++i_class )
	{
		CSingleClassNaiveBayesModel& tNBM = ListNBM.pElement[i_class];
		tC = tNBM.Classify(FeatureVector);
		if (ret<0 || DstConfidence<tC)
		{	ret = i_class; DstLabel = tNBM.Label; DstConfidence = tC;	}
	}

	return ret;
}

