
#include "20News.Data.h"

// [*] CVocabulary

void twenty_news_group::CVocabulary::LoadFromFile( string strFN_VocabularyConfig )
{
	// Rebuild the vocabulary from a whitespace-separated term file:
	// each term is appended to TermList and mapped to its 0-based index.
	Clear();
	ifstream inFile(strFN_VocabularyConfig.c_str());
	string strTerm;
	while (inFile>>strTerm)
	{
		TermList.PushBack(strTerm);
		TermMap[strTerm] = TermAmount;
		++TermAmount;
	}
	inFile.close();
	inFile.clear();
}

void twenty_news_group::CVocabulary::SaveToFile( string strFN_Vocabulary ) const
{
	// Dump every term, one per line, in original-index order.
	ofstream outFile(strFN_Vocabulary.c_str());
	for ( int ti = 0; ti < TermAmount; ++ti )
		outFile<<TermList.pElement[ti]<<endl;
	outFile.close();
	outFile.clear();
}

twenty_news_group::CVocabulary::CVocabulary()
{
	// Start from a well-defined empty state.
	this->Clear();
}

void twenty_news_group::CVocabulary::Clear()
{
	// Reset the term counter and drop both the ordered list and the lookup map.
	TermMap.clear();
	TermList.Clear();
	TermAmount = 0;
}

int twenty_news_group::CVocabulary::GetTermOriginalIndex( string strTerm ) const
{
	// Map lookup; -1 signals "term not in the vocabulary".
	CTermMap::const_iterator pos = TermMap.find(strTerm);
	if (pos == TermMap.end())
		return -1;
	return pos->second;
}

// [*] CSelectedVocabulary

// Attaches the backing vocabulary and initializes the per-term flag list so
// every term starts as "selected" (flag 0; -1 would mean unselected).
void twenty_news_group::CSelectedVocabulary::InitVocabulary( const CVocabulary* pVocabulary )
{
	this->pVocabulary = pVocabulary;
	TermSelectedFlagList.Resize(pVocabulary->TermAmount);
	TermSelectedFlagList.Fill(0);
}

void twenty_news_group::CSelectedVocabulary::MakeSelectedList()
{
	// Compact the flag list: entries >= 0 are selected terms. Each selected
	// flag is overwritten with the term's dense (selected) index, and the
	// term's original index is appended to SelectedTermIndexList.
	SelectedAmount = 0;
	SelectedTermIndexList.Clear();
	for ( int oi = 0; oi < (int)TermSelectedFlagList.Size; ++oi )
	{
		int& flag = TermSelectedFlagList.pElement[oi];
		if (flag >= 0)
		{
			flag = SelectedAmount;
			++SelectedAmount;
			SelectedTermIndexList.PushBack(oi);
		}
	}
}

void twenty_news_group::CSelectedVocabulary::LoadFromFile( string strFN_VocabularySelection )
{
	// Read one original term index per token; mark those terms selected (0),
	// everything else unselected (-1), then build the dense selection lists.
	ifstream inFile(strFN_VocabularySelection.c_str());
	TermSelectedFlagList.Fill(-1);
	int origIndex;
	while (inFile>>origIndex)
	{
		if (origIndex >= 0 && origIndex < pVocabulary->TermAmount)
			TermSelectedFlagList.pElement[origIndex] = 0;
	}
	MakeSelectedList();
	inFile.close();
	inFile.clear();
}

void twenty_news_group::CSelectedVocabulary::SaveToFile( string strFN_VocabularySelection ) const
{
	// Write the original index of every selected term, one per line.
	ofstream outFile(strFN_VocabularySelection.c_str());
	for ( int oi = 0; oi < (int)TermSelectedFlagList.Size; ++oi )
	{
		if (TermSelectedFlagList.pElement[oi] >= 0)
			outFile<<oi<<endl;
	}
	outFile.close();
	outFile.clear();
}

// [*] CTermOccurrence

// Default occurrence: term 0 seen zero times.
twenty_news_group::CTermOccurrence::CTermOccurrence()
	: TermIndex(0), Amount(0)
{
}

// Occurrence of a given term with a given count.
twenty_news_group::CTermOccurrence::CTermOccurrence( int TermIndex, int Amount )
	: TermIndex(TermIndex), Amount(Amount)
{
}

bool twenty_news_group::CTermOccurrence::operator<( const CTermOccurrence& Another ) const
{
	// Occurrences order by term index only; Amount never affects ordering.
	return TermIndex < Another.TermIndex;
}

// [*] CDiscreteDocument

void twenty_news_group::CDiscreteDocument::Clear()
{
	this->TermTotalAmount = 0;
	this->TermKindAmount = 0;
	this->TermOccurrenceList.Clear();
}

void twenty_news_group::CDiscreteDocument::NewTerm( int TermIndex, int Amount )
{
	// Non-positive counts are ignored so the occurrence list stays meaningful.
	if (Amount < 1) return;
	TermOccurrenceList.PushBack(CTermOccurrence(TermIndex, Amount));
	TermTotalAmount += Amount;
	++TermKindAmount;
}

void twenty_news_group::CDiscreteDocument::SortAndUnique()
{
	sort(TermOccurrenceList.Begin(), TermOccurrenceList.End());
	int i;
	CTermOccurrence* pto = TermOccurrenceList.pElement;
	TermKindAmount = 0;
	for ( i = 0; i < (int)TermOccurrenceList.Size; ++i )
	{
		if (i==0 || pto[TermKindAmount-1].TermIndex < pto[i].TermIndex)
			pto[TermKindAmount++] = pto[i];
		else
			pto[TermKindAmount-1].Amount += pto[i].Amount;
	}
	TermOccurrenceList.Resize(TermKindAmount);
}

bool twenty_news_group::CDiscreteDocument::operator<( const CDiscreteDocument& Another ) const
{
	// Documents sort by their original document index.
	return DocumentIndex < Another.DocumentIndex;
}


int twenty_news_group::CDiscreteDocument::GetTermOccurrenceAmount( int TermIndex ) const
{
	if (this->TermTotalAmount == 0) return 0;

	CTermOccurrence tto;
	tto.TermIndex = TermIndex;
	const CTermOccurrence* qto = lower_bound(TermOccurrenceList.Begin(), TermOccurrenceList.End(), tto);

	if (qto==TermOccurrenceList.End() || qto->TermIndex!=TermIndex) return 0;

	return qto->Amount;
}

// [*] CDenseDocument

// [*] CDocumentList

twenty_news_group::CDocumentList::CDocumentList()
{
	// No external tables attached yet; callers wire them in via Init*().
	this->pVocabulary = NULL;
	this->pSelectedVocabulary = NULL;
	this->pIDF = NULL;
}

// Attaches the vocabulary (required before InitDocumentList / ComputeIDF).
void twenty_news_group::CDocumentList::InitVocabulary( const CVocabulary* pVocabulary )
{
	this->pVocabulary = pVocabulary;
}

// Attaches the selected-vocabulary view used for feature extraction.
void twenty_news_group::CDocumentList::InitSelectedVocabulary( CSelectedVocabulary* pSelectedVocabulary )
{
	this->pSelectedVocabulary = pSelectedVocabulary;
}

// Attaches the per-term IDF table (indexed by original term index);
// NULL means features are plain normalized frequencies.
void twenty_news_group::CDocumentList::InitIDF( const double* pIDF )
{
	this->pIDF = pIDF;
}

// Loads the sparse bag-of-words data file ("doc term count" triples, 1-based
// on disk, grouped by document) together with the per-document label file.
// Documents that end up with no valid terms are dropped. Requires
// InitVocabulary() to have been called first.
void twenty_news_group::CDocumentList::InitDocumentList( string strFN_Data, string strFN_Label )
{
	if (pVocabulary == NULL) return;

	int di, ti, tc;
	ifstream inFile_Data(strFN_Data.c_str());
	ifstream inFile_Label(strFN_Label.c_str());
	
	CTermOccurrence tto;	// NOTE(review): unused in this function
	CDiscreteDocument tdd;

	// Sentinel -1: no document accumulated yet.
	tdd.DocumentIndex = -1;
	tdd.Clear();

	int max_term_index = -1;

	DocumentAmount = 0;
	for ( ; ; )
	{
		if (inFile_Data>>di>>ti>>tc)
		{
			--di; --ti;	// convert 1-based file indices to 0-based
		}
		else
		{
			// End of data: -2 forces the boundary branch below to flush the
			// last accumulated document and then break out of the loop.
			di = ti = tc = -2;
		}

		if (di != tdd.DocumentIndex)
		{
			// Document boundary: finalize the previously accumulated document.
			if (tdd.DocumentIndex >= 0)
			{
				tdd.SortAndUnique();
				if (tdd.TermKindAmount > 0)
				{
					++DocumentAmount;
					// NOTE(review): a label is consumed only for documents
					// that are kept -- confirm the label file lists labels in
					// the same order as non-empty documents in the data file.
					inFile_Label>>tdd.DocumentLabel;
					DocumentList.PushBack(tdd);
				}
			}
			tdd.Clear();
			tdd.DocumentIndex = di;
			if (di < 0) break;
		}

		if (ti < 0) continue;
		if (ti > max_term_index) max_term_index = ti;	// tracked but not used afterwards
		tdd.NewTerm(ti, tc);
	}

	// Documents are stored sorted by their original document index.
	sort(DocumentList.Begin(), DocumentList.End());

	inFile_Data.close();
	inFile_Data.clear();
	inFile_Label.close();
	inFile_Label.clear();
}

double twenty_news_group::CDocumentList::ComputeIDF( CSimpleTypeArray<double>& IDF_List )
{
	// Convenience overload: compute IDF over ALL documents, each carrying its
	// initial weight.
	CWeightedClassificationExampleList allDocs;
	CWeightedClassificationExample tExample;
	allDocs.Clear();
	for ( int di = 0; di < DocumentAmount; ++di )
	{
		tExample.ExampleIndex = di;
		tExample.Weight = this->GetExampleInitialWeight(di);
		allDocs.PushBack(tExample);
	}
	return ComputeIDF(IDF_List, allDocs);
}

double twenty_news_group::CDocumentList::ComputeIDF(
	CSimpleTypeArray<double>& IDF_List,
	const CWeightedClassificationExampleList& SelectedDocumentList)
{
	// Accumulates per-term document frequencies over the selected documents,
	// then converts each DF into an IDF via the project's CMyMath helpers.
	// Returns my_log(#selected documents), which also feeds the conversion.
	IDF_List.Resize(pVocabulary->TermAmount);
	IDF_List.Fill(0.0);
	if (SelectedDocumentList.ExampleAmount < 1) return 0.0;

	// Document-frequency pass: +1 per document in which the term occurs.
	for ( int di = 0; di < SelectedDocumentList.ExampleAmount; ++di )
	{
		const CDiscreteDocument& tdd = DocumentList.pElement[SelectedDocumentList.rgExample[di].ExampleIndex];
		for ( int ti = 0; ti < tdd.TermKindAmount; ++ti )
		{
			const CTermOccurrence& tto = tdd.TermOccurrenceList.pElement[ti];
			if (tto.TermIndex<0 || tto.TermIndex>=pVocabulary->TermAmount) continue;
			IDF_List.pElement[tto.TermIndex] += 1.0;
		}
	}

	// NOTE(review): df can be 0.0 for terms absent from all selected
	// documents; assumes CMyMath::my_log handles 0 sanely -- confirm.
	double logN = CMyMath::my_log(SelectedDocumentList.ExampleAmount);
	for ( int vi = 0; vi < pVocabulary->TermAmount; ++vi )
	{
		double df = IDF_List.pElement[vi];
		IDF_List.pElement[vi] = CMyMath::log_Product(logN, -CMyMath::my_log(df));
	}
	return logN;
}

void twenty_news_group::CDocumentList::DoFeatureSelection(
	CSelectedVocabulary& DstSelectVocabularyList, CSimpleTypeArray<double>& Dst_IDF_List,
	int DF_Threshold)
{
	// Convenience overload: run feature selection over ALL documents, each
	// carrying its initial weight.
	CWeightedClassificationExampleList allDocs;
	CWeightedClassificationExample tExample;
	allDocs.Clear();
	for ( int di = 0; di < DocumentAmount; ++di )
	{
		tExample.ExampleIndex = di;
		tExample.Weight = this->GetExampleInitialWeight(di);
		allDocs.PushBack(tExample);
	}
	DoFeatureSelection(DstSelectVocabularyList, Dst_IDF_List, DF_Threshold, allDocs);
}

void twenty_news_group::CDocumentList::DoFeatureSelection(
	CSelectedVocabulary& DstSelectVocabularyList, CSimpleTypeArray<double>& Dst_IDF_List,
	int DF_Threshold, const CWeightedClassificationExampleList& SelectedDocumentList)
{
	// Keeps a term iff its IDF does not exceed the IDF corresponding to
	// DF_Threshold documents (flag 0 = keep, -1 = discard), then compacts
	// the survivors via MakeSelectedList().
	double IDF_Threshold = this->ComputeIDF(Dst_IDF_List, SelectedDocumentList);
	IDF_Threshold = CMyMath::log_Product(IDF_Threshold, -CMyMath::my_log(DF_Threshold));
	for ( int vi = 0; vi < pVocabulary->TermAmount; ++vi )
	{
		double tIDF = Dst_IDF_List.pElement[vi];
		DstSelectVocabularyList.TermSelectedFlagList.pElement[vi] = (tIDF > IDF_Threshold ? -1 : 0);
	}
	DstSelectVocabularyList.MakeSelectedList();
}

// Number of (non-empty) documents loaded by InitDocumentList.
int twenty_news_group::CDocumentList::GetExampleAmount()
{
	return DocumentAmount;
}

// Uniform weighting: every document starts with weight 1.0 (ExampleIndex
// intentionally unused).
double twenty_news_group::CDocumentList::GetExampleInitialWeight( int ExampleIndex )
{
	return 1.0;
}

// One feature per selected vocabulary term; requires InitSelectedVocabulary()
// to have been called (pSelectedVocabulary is dereferenced unchecked).
int twenty_news_group::CDocumentList::GetFeatureDimension()
{
	return pSelectedVocabulary->SelectedAmount;
}

double twenty_news_group::CDocumentList::GetExampleFeature( int ExampleIndex, int FeatureIndex )
{
	if (ExampleIndex<0 || ExampleIndex>=DocumentAmount) return 0.0;
	if (FeatureIndex<0 || FeatureIndex>=pSelectedVocabulary->SelectedAmount) return 0.0;
	
	const CDiscreteDocument& tdd = DocumentList.pElement[ExampleIndex];
	int ti = pSelectedVocabulary->SelectedTermIndexList.pElement[FeatureIndex];
	int toa = tdd.GetTermOccurrenceAmount(ti);
	return (pIDF==NULL ? 1.00 : pIDF[ti]) * toa / tdd.TermTotalAmount;
}

int twenty_news_group::CDocumentList::GetExampleLabel( int ExampleIndex )
{
	// Out-of-range indices map to the sentinel label -1.
	if (ExampleIndex >= 0 && ExampleIndex < DocumentAmount)
		return DocumentList.pElement[ExampleIndex].DocumentLabel;
	return -1;
}

// Delegates to ComputeDataDensity() -- presumably declared on a base class
// (not visible in this file; confirm against the header).
double twenty_news_group::CDocumentList::GetDataDensity()
{
	return this->ComputeDataDensity();
}

void twenty_news_group::CDocumentList::MakeSparseVector_ByExample( CSparseVector& DstFeatureVector, int SrcExampleIndex )
{
	// Builds the sparse feature vector of one document by a linear merge of
	// two sorted index sequences: the document's term occurrences (QT) and the
	// selected vocabulary's original-index list (QV). Feature j receives the
	// (IDF-scaled) normalized count of the j-th selected term.
	DstFeatureVector.Clear();
	if (SrcExampleIndex<0 || SrcExampleIndex>=DocumentAmount) return;
	const CDiscreteDocument& tdd = DocumentList.pElement[SrcExampleIndex];
	int i, ii, j, jj;
	double t;
	i = 0; j = 0; ii = tdd.TermKindAmount; jj = pSelectedVocabulary->SelectedAmount;
	const CTermOccurrence* QT = tdd.TermOccurrenceList.pElement;
	const int* QV = pSelectedVocabulary->SelectedTermIndexList.pElement;
	while (i<ii && j<jj)
	{
		// Skip document terms below the current selected term.
		while (i<ii && QT[i].TermIndex<QV[j]) ++i;
		// BUGFIX: the original loop here omitted the i<ii guard, so when the
		// previous loop exhausted the document's terms it read QT[ii] --
		// one element past the end of the occurrence array.
		while (i<ii && j<jj && QT[i].TermIndex>QV[j]) ++j;
		if (i<ii && j<jj && QT[i].TermIndex==QV[j])
		{
			t = (pIDF==NULL ? 1.00 : pIDF[QT[i].TermIndex]) * QT[i].Amount / tdd.TermTotalAmount;
			DstFeatureVector.PushBack(j, t);
			++i; ++j;
		}
	}
}

// [*] CTargetBinaryLabelSets

// Reads a label-settings file consisting of two token groups, each terminated
// by "#" or "!": the first group lists positive labels, the second negative
// labels. A "!" terminator means "everything not in the other set".
void twenty_news_group::CTargetBinaryLabelSets::myInit( string strFN_LabelSettings )
{
	CSimpleTypeArray<int> PosLabelSet;
	CSimpleTypeArray<int> NegLabelSet;
	string strTemp;

	ifstream inFile(strFN_LabelSettings.c_str());

	// Positive set: gather integer tokens until "#" or "!" (or stream failure).
	// NOTE(review): on EOF, strTemp keeps its last successfully-read value, so
	// a group that ends at EOF without a terminator re-uses the previous token
	// in the checks below -- confirm the file always ends each group with an
	// explicit "#" or "!".
	PosLabelSet.Clear();
	while (inFile>>strTemp && strTemp!="#" && strTemp!="!") PosLabelSet.PushBack(atoi(strTemp.c_str()));
	if (strTemp=="!")
		BinaryClassificationDataSetPrototype.SetPosLabelSetByNonNeg();
	else
		BinaryClassificationDataSetPrototype.SetPosLabelSet(PosLabelSet);

	// Negative set: same format as the positive group.
	NegLabelSet.Clear();
	while (inFile>>strTemp && strTemp!="#" && strTemp!="!") NegLabelSet.PushBack(atoi(strTemp.c_str()));
	if (strTemp=="!")
		BinaryClassificationDataSetPrototype.SetNegLabelSetByNonPos();
	else
		BinaryClassificationDataSetPrototype.SetNegLabelSet(NegLabelSet);

	inFile.close();
	inFile.clear();
}

