
#include "cl_knn_base.h"

// [*] CCosineKNN_Model::CParameters

// Default parameter set: 1-NN, example-major transpose layout, and the
// default on-disk file name for the transposed training data.
CCosineKNN_Model::CParameters::CParameters()
{
	this->strFN_TrainData = "knn.source_data.dat";
	this->TransposeMode = TRANSPOSE_MODE_BY_EXAMPLE;
	this->K = 1;
}

// Reads the parameter set written by OutputToStream: "K \t TransposeMode"
// on one line, the training-data file name on the next.
// Returns false if any field fails to parse.
bool CCosineKNN_Model::CParameters::InputFromStream( istream& inStream )
{
	bool ret = true;
	ret = ret && (inStream>>K>>TransposeMode);
	// FIX: skip the remainder of the numeric line into a throwaway buffer
	// instead of clobbering strFN_TrainData with it, then read the file
	// name from the next full line (file names may contain spaces).
	string strSkip;
	getline(inStream, strSkip);
	ret = ret && (getline(inStream, strFN_TrainData));
	return ret;
}

// Serializes the parameter set as two lines — "K \t TransposeMode" and the
// training-data file name — mirroring the layout read by InputFromStream.
void CCosineKNN_Model::CParameters::OutputToStream( ostream& outStream ) const
{
	outStream << K << "\t" << TransposeMode << endl
		<< strFN_TrainData << endl;
}

// [*] CCosineKNN_Model::CInnerExampleRecord

// Strict-weak ordering: ascending example index.
bool CCosineKNN_Model::CInnerExampleRecord::CompareByIndex(const CInnerExampleRecord& a, const CInnerExampleRecord& b)
{
	return (b.Index > a.Index);
}

// Strict-weak ordering: ascending label value.
bool CCosineKNN_Model::CInnerExampleRecord::CompareByLabel( const CInnerExampleRecord& a, const CInnerExampleRecord& b )
{
	return (b.Label > a.Label);
}

// Strict-weak ordering: DESCENDING similarity, so the most similar
// training examples sort to the front.
bool CCosineKNN_Model::CInnerExampleRecord::CompareBySimilarity( const CInnerExampleRecord& a, const CInnerExampleRecord& b )
{
	return (b.Similarity < a.Similarity);
}

// [*] CCosineKNN_Model

// Constructs the model with an L2-norm normalizer: every sparse feature
// vector is L2-normalized before similarity computation, so the sparse dot
// products in Classify_ToVector are true cosine similarities.
CCosineKNN_Model::CCosineKNN_Model()
{
	MyNormalizer = CMyNormalizer_L2_NORM(MINIMUM_DOUBLE_EPS_V);
}

// Deserializes the model header (parameters, feature dimension, example
// amount, binary-label settings) followed by one "label weight" pair per
// training example. Returns false on any parse failure.
bool CCosineKNN_Model::InputFromStream( istream& inStream )
{
	bool ret = Parameters.InputFromStream(inStream);
	ret = ret && (inStream >> FeatureDimension >> ExampleAmount);
	if (ret)
	{
		// NOTE(review): the return status of the label-settings read is
		// not folded into ret (matches the original behavior).
		BinaryClassificationLabelSettings.InputLabelSettingsFromStream(inStream);
		ExampleLabelList.Resize(ExampleAmount);
		ExampleWeightList.Resize(ExampleAmount);
		for ( int i = 0; (ret && i < ExampleAmount); ++i )
			ret = ret && (inStream >> ExampleLabelList.pElement[i] >> ExampleWeightList.pElement[i]);
	}
	return ret;
}

// Serializes the model: parameters, dimensions, binary-label settings, and
// one "label \t weight" line per training example. Weights are written in
// fixed notation with 9-digit precision. Output is byte-identical to the
// previous version.
void CCosineKNN_Model::OutputToStream( ostream& outStream )
{
	Parameters.OutputToStream(outStream);
	outStream<<FeatureDimension<<"\t"<<ExampleAmount<<endl;
	BinaryClassificationLabelSettings.OutputLabelSettingsToStream(outStream);
	// FIX: dropped the dead `if (1)` wrappers and hoisted the sticky
	// `fixed` manipulator out of the loop (it was re-applied every
	// iteration to no effect after the first).
	outStream.precision(9);
	outStream<<fixed;
	for ( int i = 0; i < ExampleAmount; ++i )
		outStream<<ExampleLabelList.pElement[i]<<"\t"<<ExampleWeightList.pElement[i]<<endl;
}

// Loads a serialized model from strFN_Model and, on success, opens the
// transposed training-data file with the given memory-cache budget.
// Returns false if the file cannot be opened or parsed.
bool CCosineKNN_Model::LoadFromFile( string strFN_Model, int ModelMemoryCacheSize_InMB )
{
	ifstream inFile(strFN_Model.c_str());
	const bool ret = InputFromStream(inFile);
	if (ret)
		_InitTrainData(ModelMemoryCacheSize_InMB);
	inFile.close();
	inFile.clear();
	return ret;
}

void CCosineKNN_Model::SaveToFile( string strFN_Model )
{
	ofstream outFile(strFN_Model.c_str());
	OutputToStream(outFile);
	outFile.close();
	outFile.clear();
}

// Trains from a labeled data set: snapshots each training example's label
// and weight (weights rescaled so they sum to ExampleAmount), then rebuilds
// the feature-major (transposed) training-data file and opens it.
void CCosineKNN_Model::Train(
	CLabeledDataSetForClassification* pLabeledDataSet_Train, CWeightedClassificationExampleList& ExampleList_Train,
	const CParameters& Parameters, int TransposeMemoryCacheSize_InMB, int ModelMemoryCacheSize_InMB)
{
	FeatureDimension = pLabeledDataSet_Train->GetFeatureDimension();
	ExampleAmount = ExampleList_Train.ExampleAmount;
	this->Parameters = Parameters;

	ExampleLabelList.Resize(ExampleAmount);
	ExampleWeightList.Resize(ExampleAmount);
	ExampleList_Train.ComputeTotalExampleWeight();
	// Rescale so all example weights sum to ExampleAmount (unit average).
	// NOTE(review): assumes TotalExampleWeight > 0 — confirm upstream.
	const double WeightScale = ExampleAmount / ExampleList_Train.TotalExampleWeight;
	for ( int i = 0; i < ExampleAmount; ++i )
	{
		const int SrcExampleIndex = ExampleList_Train.rgExample[i].ExampleIndex;
		ExampleLabelList.pElement[i] = pLabeledDataSet_Train->GetExampleLabel(SrcExampleIndex);
		ExampleWeightList.pElement[i] = ExampleList_Train.rgExample[i].Weight * WeightScale;
	}

	TrainData.myRelease();
	_TransposeTrainData(pLabeledDataSet_Train, ExampleList_Train, TransposeMemoryCacheSize_InMB);
	_InitTrainData(ModelMemoryCacheSize_InMB);
}

// Trains from a binary example list. The mixed list produced by
// MakeMixedList holds the positives (PosList) first, so labels are assigned
// +1 for positions [0, PosList.ExampleAmount) and -1 afterwards; weights
// are rescaled to sum to ExampleAmount. The binary label settings are set
// to {+1} / {-1} and the transposed training data is rebuilt.
void CCosineKNN_Model::Train(
	CDataSetForClassification* pDataSet_Train, CBinaryClassificationExampleList& BinaryExampleList_Train,
	const CParameters& Parameters, int TransposeMemoryCacheSize_InMB, int ModelMemoryCacheSize_InMB)
{
	FeatureDimension = pDataSet_Train->GetFeatureDimension();
	ExampleAmount = BinaryExampleList_Train.GetTotalExampleAmount();
	this->Parameters = Parameters;

	CWeightedClassificationExampleList ExampleList_Train;
	BinaryExampleList_Train.MakeMixedList(ExampleList_Train);

	ExampleLabelList.Resize(ExampleAmount);
	ExampleWeightList.Resize(ExampleAmount);
	{
		int i, pa;
		pa = BinaryExampleList_Train.PosList.ExampleAmount;
		ExampleList_Train.ComputeTotalExampleWeight();
		// Rescale so all example weights sum to ExampleAmount.
		// NOTE(review): assumes TotalExampleWeight > 0 — confirm upstream.
		double sw = ExampleAmount / ExampleList_Train.TotalExampleWeight;
		for ( i = 0; i < ExampleAmount; ++i )
		{
			// FIX: removed the dead store of rgExample[i].ExampleIndex into a
			// local `t` that was never read — labels depend only on position.
			ExampleLabelList.pElement[i] = (i<pa ? +1 : -1);
			ExampleWeightList.pElement[i] = ExampleList_Train.rgExample[i].Weight * sw;
		}
	}

	// Binary label settings: positive set {+1}, negative set {-1}.
	CSimpleTypeArray<int> ti;
	ti.Clear(); ti.PushBack(+1); BinaryClassificationLabelSettings.SetPosLabelSet(ti);
	ti.Clear(); ti.PushBack(-1); BinaryClassificationLabelSettings.SetNegLabelSet(ti);

	TrainData.myRelease();
	_TransposeTrainData(pDataSet_Train, ExampleList_Train, TransposeMemoryCacheSize_InMB);
	_InitTrainData(ModelMemoryCacheSize_InMB);
}

// Builds the feature-major (transposed) copy of the training data on disk.
// Features are processed in pools sized so that a pool of per-feature sparse
// vectors fits TransposeMemoryCacheSize_InMB; for each pool, every training
// example is scanned once and its values for the pooled features are
// appended, then the pool is flushed to the data file.
void CCosineKNN_Model::_TransposeTrainData(
	CDataSetForClassification* pDataSet_Train, CWeightedClassificationExampleList& ExampleList_Train,
	int TransposeMemoryCacheSize_InMB)
{
	char strTemp[10001];
	CSparseVector tSparseVector;
	// FIX: the original condition compared the constant with itself
	// (Parameters.TRANSPOSE_MODE_BY_EXAMPLE == CParameters::TRANSPOSE_MODE_BY_EXAMPLE),
	// which is always true; the configured mode must be tested instead.
	if (Parameters.TransposeMode == CParameters::TRANSPOSE_MODE_BY_EXAMPLE)
	{
		// Estimate how many features' transposed vectors fit in the cache.
		double DataDensity = pDataSet_Train->GetDataDensity();
		double AvgFeatureSize = DataDensity * ExampleAmount * CSparseVector::GetIVPairMemorySize();
		double TMCS_InByte = 1048576.0 * TransposeMemoryCacheSize_InMB;
		int FeaturePoolSize = (int)(TMCS_InByte / AvgFeatureSize);
		FeaturePoolSize = (FeaturePoolSize<1 ? 1 : (FeaturePoolSize>FeatureDimension ? FeatureDimension : FeaturePoolSize));

		C_LDS_Sparse_ByExample::CDataFileWriter DataFileWriter;
		DataFileWriter.myInit(Parameters.strFN_TrainData, ExampleAmount);
		int i1, i2, j, o, oo;
		for ( i1 = 0; i1 < FeatureDimension; i1 += FeaturePoolSize)
		{
			// Current pool covers feature indices [i1, i2).
			i2 = i1 + FeaturePoolSize; if (i2 > FeatureDimension) i2 = FeatureDimension;
			CSimpleTypeArray<CSparseVector> FeaturePool;
			FeaturePool.Resize(i2 - i1);
			for ( j = 0; j < ExampleAmount; ++j )
			{
				pDataSet_Train->MakeSparseVector_ByExample(tSparseVector, ExampleList_Train.rgExample[j].ExampleIndex);
				const int* QI = tSparseVector.IndexList.pElement;
				const double* QV = tSparseVector.ValueList.pElement;
				oo = tSparseVector.Length;
				// First entry of this example that falls inside the pool
				// (IndexList is assumed sorted ascending — lower_bound).
				o = (int)(lower_bound(QI, QI+oo, i1) - QI);
				// NOTE(review): Normalize is assumed to rescale ValueList in
				// place without reallocating, so QV stays valid — confirm.
				tSparseVector.Normalize(&MyNormalizer);
				for ( ; (o<oo && QI[o]<i2); ++o )
					FeaturePool.pElement[QI[o] - i1].PushBack(j, QV[o]);

				// Progress line, throttled to roughly once per 100 examples.
				// NOTE(review): NextInt(ExampleAmount/100) is called with 0
				// when ExampleAmount < 100 — verify CMyRandom handles that.
				if (CMyRandom::NextInt(ExampleAmount/100)==0 || j+1==ExampleAmount)
				{
					sprintf_s(strTemp, "KNN Train Data Transposing: <F,E> = [%d~%d/%d, %d/%d]...",
						i1+1, i2+1, FeatureDimension, j+1, ExampleAmount);
					CExternalTools::ConsoleTools::MakeCurrentLine(strTemp, false);
				}
			}
			// NOTE(review): i2 is exclusive, so the displayed "~%d" upper
			// bound overstates by one; kept as-is (display only).
			sprintf_s(strTemp, "KNN Train Data Transposing: writing feature [%d~%d/%d]...",
				i1+1, i2+1, FeatureDimension);
			CExternalTools::ConsoleTools::MakeCurrentLine(strTemp, false);
			for ( o = i1; o < i2; ++o )
				DataFileWriter.PushBack(FeaturePool.pElement[o - i1], o, 1.0);
		}
		DataFileWriter.Finish();
	}
	else
	{
		//TODO: other transpose modes are not implemented yet.
	}

	sprintf_s(strTemp, "KNN Train Data Transposing Finished;");
	CExternalTools::ConsoleTools::MakeCurrentLine(strTemp, true);
}

// Opens the transposed training-data file with the given memory-cache
// budget and resets the per-query scratch record list.
void CCosineKNN_Model::_InitTrainData( int ModelMemoryCacheSize_InMB )
{
	TrainData.myInit(Parameters.strFN_TrainData, ModelMemoryCacheSize_InMB);
	// Reserve capacity for one record per training example up front; the
	// records themselves are (re)built per query in Classify_ToVector.
	InnerExampleRecordList.Resize(ExampleAmount);
	InnerExampleRecordList.Clear();
}

// Classifies one example. The query vector is L2-normalized, then cosine
// similarity against every training example sharing at least one feature is
// accumulated via the feature-major training data. The most similar
// neighbors are kept until their (normalized) weight mass reaches K, and
// DstResult receives the label -> normalized-weight distribution, sorted by
// value so the best label is last.
void CCosineKNN_Model::Classify_ToVector(CDataSetForClassification* pDataSet, int ExampleIndex, CSparseVector& DstResult)
{
	CMapping_ExampleSimilarity::CIterator itr;
	CInnerExampleRecord tInnerExampleRecord;
	CSparseVector tSparseVector;

	pDataSet->MakeSparseVector_ByExample(tSparseVector, ExampleIndex);
	tSparseVector.Normalize(&MyNormalizer);

	// Sparse dot products: for each query feature, walk that feature's
	// (example-index, value) list and accumulate per-example similarity.
	MES.Clear();
	int i, j, fi;
	double tv_sv, vv, sv;
	for ( i = 0; i < tSparseVector.Length; ++i )
	{
		fi = tSparseVector.IndexList.pElement[i];
		tv_sv = tSparseVector.ValueList.pElement[i];
		const CSparseVector& oSparseVector = TrainData.GetExampleFeatureSparseVector(fi);
		for ( j = 0; j < oSparseVector.Length; ++j )
		{
			vv = oSparseVector.ValueList.pElement[j] * tv_sv;
			MES.AddTo(oSparseVector.IndexList.pElement[j], vv);
		}
	}

	if (MES.Size() == 0)
	{
		// FIX: also clear DstResult, so callers (e.g. Classify_BestLabel
		// reading ClassifyResult) never see the previous query's result
		// when this query matched no training example.
		InnerExampleRecordList.Clear();
		DstResult.Clear();
		return;
	}

	// Materialize (index, label, weight, similarity) records and sort by
	// descending similarity.
	InnerExampleRecordList.Clear();
	for ( itr = MES.Mapping.begin(); itr != MES.Mapping.end(); ++itr )
	{
		tInnerExampleRecord.Index = i = itr->first;
		tInnerExampleRecord.Label = ExampleLabelList.pElement[i];
		tInnerExampleRecord.Weight = ExampleWeightList.pElement[i];
		tInnerExampleRecord.Similarity = itr->second;
		InnerExampleRecordList.PushBack(tInnerExampleRecord);
	}
	sort(InnerExampleRecordList.Begin(), InnerExampleRecordList.End(), CInnerExampleRecord::CompareBySimilarity);

	// Keep the nearest neighbors until their weight mass reaches K. Since
	// training rescales weights to average 1, this reduces to the classic
	// K nearest when all weights are 1.
	sv = 0.0;
	for ( fi = 0; (sv<Parameters.K && fi<(int)InnerExampleRecordList.Size); ++fi )
		sv += InnerExampleRecordList.pElement[fi].Weight;
	InnerExampleRecordList.Resize(fi);

	if (sv < MINIMUM_DOUBLE_EPS_C)
	{
		// FIX: clear DstResult here as well (same stale-result hazard).
		InnerExampleRecordList.Clear();
		DstResult.Clear();
		return;
	}

	// Aggregate kept neighbors' weights per label, normalized by the total
	// kept weight sv, and emit as a sparse label->score vector.
	MES.Clear();
	const CInnerExampleRecord* qInnerExampleRecord = InnerExampleRecordList.pElement;
	for ( i = 0; i < fi; ++i )
		MES.AddTo(qInnerExampleRecord[i].Label, qInnerExampleRecord[i].Weight / sv);
	DstResult.Clear();
	for ( itr = MES.Mapping.begin(); itr != MES.Mapping.end(); ++itr )
		DstResult.PushBack(itr->first, itr->second);
	DstResult.SortByValue();
}

// Returns the best label and its confidence for one example.
// Classify_ToVector sorts the result by value ascending, so the best label
// and score sit in the last slot.
void CCosineKNN_Model::Classify_BestLabel( CDataSetForClassification* pDataSet, int ExampleIndex, int& DstPrediction, double& DstConfidence )
{
	this->Classify_ToVector(pDataSet, ExampleIndex, ClassifyResult);

	// FIX: guard against an empty result (no training example shares a
	// feature with the query) — the original read pElement[-1] in that
	// case. 0 is used as a "no prediction" sentinel label.
	if (ClassifyResult.Length <= 0)
	{
		DstPrediction = 0;
		DstConfidence = 0.0;
		return;
	}
	DstPrediction = ClassifyResult.IndexList.pElement[ClassifyResult.Length - 1];
	DstConfidence = ClassifyResult.ValueList.pElement[ClassifyResult.Length - 1];
}

// Binary classification: buckets the label distribution into positive /
// negative / unknown weight per BinaryClassificationLabelSettings, then
// predicts the heavier side (ties go to -1); confidence = pos - neg.
// An empty classify result yields prediction -1 with confidence 0.
void CCosineKNN_Model::_Classify( CDataSetForClassification* pDataSet, int ExampleIndex, double& DstConfidence, int& DstPrediction )
{
	this->Classify_ToVector(pDataSet, ExampleIndex, ClassifyResult);

	const int* qL = ClassifyResult.IndexList.pElement;
	const double* qW = ClassifyResult.ValueList.pElement;
	double PosWeight = 0.0, NegWeight = 0.0, UnknownWeight = 0.0;
	for ( int i = 0; i < ClassifyResult.Length; ++i )
	{
		if (BinaryClassificationLabelSettings.IsLabelPos(qL[i]))
			PosWeight += qW[i];
		else if (BinaryClassificationLabelSettings.IsLabelNeg(qL[i]))
			NegWeight += qW[i];
		else
			UnknownWeight += qW[i];
	}

	DstConfidence = PosWeight - NegWeight;
	DstPrediction = (PosWeight > NegWeight ? +1 : -1);
}

