//==============================================================================================================================================================
// Author(s): Roman Khmelichek, Wei Jiang
//==============================================================================================================================================================

#include "polyIRIndexer.h"

#include <algorithm>
#include <boost/algorithm/string.hpp>
#include <boost/version.hpp>
#include <fstream>
#include <iostream>

// for the machine pangolin
// #include <python2.6/Python.h>
// for the machine dodo
#include <python2.7/Python.h>

#include <sstream>
#include <time.h>

using namespace std;

// Holds every option parsed from the command line. A single static instance
// (command_line_args, below) is populated at startup and read by the mode
// entry points (Query(), MergeInitial(), Cat(), ...).
struct CommandLineArgs {

  CommandLineArgs() :
    role(kNoRole),
    mode(kNoIdea),
    merge_degree(0),
    output_index_prefix(NULL),
    term(NULL),
    term_len(0),
    in_memory_index(false),
    memory_mapped_index(false),
    use_external_index(false),
    doc_mapping_file(NULL),
    query_stop_words_list_file(NULL),

    //Already implemented in the command line.
    //original version
    query_algorithm_local(LocalQueryProcessor::kDefault),
    query_mode_local(LocalQueryProcessor::kInteractive),
    result_format_local(LocalQueryProcessor::kNormal){
  }

  ~CommandLineArgs() {
  }

  // The major operation the toolkit was asked to perform; stays kNoIdea until
  // the command line has been parsed.
  enum Mode {
    kIndex, kMergeInitial, kMergeInput, kQuery, kRemap, kLayerify, kCat, kDiff, kRetrieveIndexData, kLoopOverIndexData, kNoIdea, kPrune
  };

  // Where the operation runs; only the local role is used in this file.
  enum Role{
    kLocal, kNoRole
  };

  IndexFiles index_files1;  // Primary index (used by Query() and Cat()).
  IndexFiles index_files2;  // Secondary index (presumably for two-index modes such as kDiff -- confirm).

  Role role;
  Mode mode;


  int merge_degree;  // <= 0 means "use the default degree" (see MergeInitial()).

  const char* output_index_prefix;

  // Single term (and its length) used by term-oriented commands (e.g. IndexCat::Cat()).
  const char* term;
  int term_len;

  bool in_memory_index;
  bool memory_mapped_index;

  bool use_external_index;

  const char* doc_mapping_file;

  const char* query_stop_words_list_file;

  // Already associated with the command line.
  LocalQueryProcessor::QueryAlgorithm query_algorithm_local;
  LocalQueryProcessor::QueryMode query_mode_local;
  LocalQueryProcessor::ResultFormat result_format_local;
};

// File-wide instance holding the parsed command line options.
static CommandLineArgs command_line_args;

// Name of the file recording docID ranges per document collection (per its
// name; the code that reads/writes it is outside this chunk).
static const char document_collections_doc_id_ranges_filename[] = "document_collections_doc_id_ranges";

// Overrides the options set in the configuration file or adds new options to the configuration as specified on the command line.
// Syntax for 'options': key1=value1;key2=value2;
// Note that each key/value pair must end with a semicolon, except the last pair, which is optional for convenience.
// When entering on the command line, the semicolon char ';' is considered a special character by the shell and so
// must be escaped by prepending a '\' character in front.
// Example:
// $ ./irtk --index --config-options=document_collection_format=trec\;include_positions=false\;new_option=1
void OverrideConfigurationOptions(const string& options) {
  cout << "Overriding the following configuration file options: " << endl;

  size_t option_start = 0;
  size_t option_end = 0;
  size_t last_option_start = 0;

  while ((option_end = options.find(';', option_start)) != string::npos) {
    string key_value = options.substr(option_start, (option_end - option_start));
    ++option_end;
    option_start = option_end;
    last_option_start = option_start;
    SetConfigurationOption(key_value);
  }

  // The only option specified or the last option specified didn't end with a semicolon.
  if (option_start == 0 || option_start != options.size()) {
    string key_value = options.substr(last_option_start);
    SetConfigurationOption(key_value);
  }
}



// Displays common usage information. For more details, the project wiki should be consulted.
void Help() {
  // The entire usage text is emitted as one chained insertion; every string
  // literal (and each endl vs. "\n" choice) is kept byte-identical to the
  // historical output.
  std::cout << std::endl
            << "* Version Info *\n"
            << "Updated on 2012/09/02 early morning by Wei at school" << std::endl
            << "The current version is focus on: Optimizations in order to welcome Qi." << std::endl
            << std::endl
            << "* Quick Start Guide *\n"
            << "index usage: 'irtk --local --index'\n"
            << "  expects a list of paths to document bundles from stdin\n"
            << "\n"
            << "merge usage: 'irtk --local --merge'\n"
            << "  merges the initial indices generated by the indexing process\n"
            << "\n"
            << "query usage: 'irtk --local --query'\n"
            << "  queries the final index generated by the merging process\n"
            << "\n"
            << "* Additional Notes *\n"
            << "Please see the current latest WIKI page at 'http://code.google.com/p/poly-ir-toolkit-wei-june/w/list' for more detailed usage information." << std::endl
            << "For problems and feedbacks, please contact 'Wei Jiang' using wj382@nyu.edu" << std::endl
            << "\n";
}

// queryTermsTrueProbabilityDistributionMap,queryTerms1DProbabilityDistributionMap,queryTerms2DProbabilityDistributionMap,queryTermsGoodTuringProbabilityDistributionMap
// Loads four per-term probability estimates (gold-standard "true", 1D, 2D and
// Good-Turing) into the four output maps. The input file name comes from the
// configuration key kQueryTermProbablityDistributionFileNameAdvanced.
// Expected layout: one header line, then one whitespace-separated row per term:
//   queryTerm goldStandardRealProbability 1D 2D goodTuring
void LoadUpQueryTermsProbabilityDistribution_Advance(map<string,float> &queryTermsTrueProbabilityDistributionMap,map<string,float> &queryTerms1DProbabilityDistributionMap,map<string,float> &queryTerms2DProbabilityDistributionMap,map<string,float> &queryTermsGoodTuringProbabilityDistributionMap) {
	string inputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermProbablityDistributionFileNameAdvanced));
	string currentLine;
	ifstream inputfile(inputFileName.c_str());

	if ( inputfile.good() ){
		// Skip the header line: queryTerm goldStandardRealProbability 1D 2D goodTuring
		getline (inputfile,currentLine);
	}

	// Test the stream state returned by getline() directly; the previous
	// while(inputfile.good()) form attempted one spurious read at end-of-file.
	while ( getline(inputfile, currentLine) )
	{
		if(currentLine == ""){
			continue;
		}
		boost::algorithm::trim(currentLine);

		istringstream iss( currentLine );
		string term;
		string trueProbability;
		string oneDProbability;
		string twoDProbability;
		string goodTuringProbability;
		iss >> term >> trueProbability >> oneDProbability >> twoDProbability >> goodTuringProbability;

		// atof() yields 0.0 for malformed numbers, matching the original behavior.
		queryTermsTrueProbabilityDistributionMap[term] = atof(trueProbability.c_str());
		queryTerms1DProbabilityDistributionMap[term] = atof(oneDProbability.c_str());
		queryTerms2DProbabilityDistributionMap[term] = atof(twoDProbability.c_str());
		queryTermsGoodTuringProbabilityDistributionMap[term] = atof(goodTuringProbability.c_str());
	}

	inputfile.close();

	// Warn when any map came back empty -- the caller gets no other signal.
	if(queryTermsTrueProbabilityDistributionMap.size() == 0 or queryTerms1DProbabilityDistributionMap.size() == 0 or queryTerms2DProbabilityDistributionMap.size() == 0 or queryTermsGoodTuringProbabilityDistributionMap.size() == 0){
		GetDefaultLogger().Log("Load query Terms True Probability Distribution Map NOT done --- Take Care", false);
		GetDefaultLogger().Log("Load query Terms 1D Probability Distribution Map NOT done --- Take Care", false);
		GetDefaultLogger().Log("Load query Terms 2D Probability Distribution Map NOT done --- Take Care", false);
		GetDefaultLogger().Log("Load query Terms Good Turing Probability Distribution Map NOT done --- Take Care", false);
	}
	else{
		GetDefaultLogger().Log(Stringify(queryTermsTrueProbabilityDistributionMap.size()) + " <term, trueProbability> pairs have been loaded.", false);
		GetDefaultLogger().Log(Stringify(queryTerms1DProbabilityDistributionMap.size()) + " <term, 1DProbability> pairs have been loaded.", false);
		GetDefaultLogger().Log(Stringify(queryTerms2DProbabilityDistributionMap.size()) + " <term, 2DProbability> pairs have been loaded.", false);
		GetDefaultLogger().Log(Stringify(queryTermsGoodTuringProbabilityDistributionMap.size()) + " <term, GoodTuringProbability> pairs have been loaded.", false);
	}
}


// Loads a <queryTerm, probability> distribution from the file named by the
// configuration key kQueryTermProbablityDistributionFileName.
// Expected layout: one whitespace-separated "term probability" pair per line.
void LoadUpQueryTermsProbabilityDistribution(map<string,float> &queryTermsProbabilityDistributionMap) {
	string inputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermProbablityDistributionFileName));
	string currentLine;
	ifstream inputfile(inputFileName.c_str());

	// Loop on getline() itself rather than inputfile.good(), so the final
	// (failed) read at end-of-file is never processed.
	while ( getline(inputfile, currentLine) )
	{
		if(currentLine == ""){
			continue;
		}
		boost::algorithm::trim(currentLine);

		istringstream iss( currentLine );
		string term;
		string probability;
		iss >> term >> probability;

		// atof() yields 0.0 for malformed numbers, matching the original behavior.
		queryTermsProbabilityDistributionMap[term] = atof(probability.c_str());
	}

	inputfile.close();

	if(queryTermsProbabilityDistributionMap.size() == 0){
		GetDefaultLogger().Log("Load Query Term Probability Distribution by Our Own Method--- Take Care", false);
	}
	else{
		GetDefaultLogger().Log(Stringify(queryTermsProbabilityDistributionMap.size()) + " pairs have been loaded.", false);
	}
}

// Starts the local query processor against a hard-coded index directory.
// Only acts when the command line selected the local role.
void Query() {
  if (command_line_args.role == CommandLineArgs::kLocal){
      GetDefaultLogger().Log("Starting query processor with index '" + command_line_args.index_files1.prefix() + "'.", false);

      // index path options:
      // string indexPath = "/data1/team/weijiang/compatibleIndexesWithIRTK/gov2";
      // string indexPath = "/data5/team/weijiang/compatibleIndexesWithIRTK/gov2";
      // string indexPath = "/data1/team/weijiang/machine-learning-project-related/programs/trunk/polyIRIndexer";
      // string indexPath = "/data/rkhmel01/ir_toolkit";
      // string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index";
      // string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2Index";
      // string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_Machine_Learned";
      // NOTE(review): the index location is hard-coded here and overrides the
      // directory implied by the command line -- confirm this is intentional.
      string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25";
      // string indexPath = "/home/diaosi/outputDirForIndexes/originalGov2Index/";
      // string indexPath = "/data3/obukai/workspace/web-search-engine-wei/polyIRIndexer/";
      // string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";


      GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);
      command_line_args.index_files1.SetDirectory(indexPath);

	  //command_line_args.index_files1 = ParseIndexName("completedQueryTermsForGOV2EfficiencyTaskWithWrongScoresStored4Part1");
	  //command_line_args.index_files1.SetDirectory("/data5/team/weijiang/compatibleIndexesWithIRTK/gov2");

      // NOTE(review): query_processor is constructed and never referenced again;
      // presumably the LocalQueryProcessor constructor runs the whole query
      // session (e.g. the interactive loop) -- verify before refactoring.
      LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
                                     command_line_args.query_mode_local, command_line_args.result_format_local);
  }
}

// Returns the process-wide CollectionIndexer, lazily constructed on first use
// as a function-local static (Meyers singleton).
CollectionIndexer& GetCollectionIndexer() {
  static CollectionIndexer collection_indexer;
  return collection_indexer;
}

// This performs the merge for the complete index starting from the initial 0.0 indices.
void MergeInitial() {
  DIR* dir;
  if ((dir = opendir(".")) == NULL) {
    GetErrorLogger().LogErrno("opendir() in MergeInitial(), could not open directory to access files to merge", errno, true);
    return;
  }

  int num_indices = 0;
  struct dirent* entry;
  while ((entry = readdir(dir)) != NULL) {
    const char initial_index_prefix[] = "index.idx.0";  // Just checks for the presence of the index files.
    int idx_file = strncmp(entry->d_name, initial_index_prefix, sizeof(initial_index_prefix) - 1);
    if (idx_file == 0) {
      ++num_indices;
    }
  }

  closedir(dir);

  const int kDefaultMergeDegree = 64;
  const bool kDeleteMergedFiles = Configuration::GetResultValue(Configuration::GetConfiguration().GetBooleanValue(config_properties::kDeleteMergedFiles));
  CollectionMerger merger(num_indices, (command_line_args.merge_degree <= 0 ? kDefaultMergeDegree : command_line_args.merge_degree), kDeleteMergedFiles);
}

// Loads "queryID term docID" lines from the file named by the configuration
// key kQueryIDTermDocIDPairInputFile into TermDocIDPairList. Each element is
// a whole (trimmed) line; callers split the fields themselves.
void LoadUpQueryTermDocIDPairList(vector<string> &TermDocIDPairList){
	string inputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryIDTermDocIDPairInputFile));
	string currentLine;
	ifstream inputfile(inputFileName.c_str());

	// Loop on getline() directly; the old while(inputfile.good()) form
	// attempted one extra read at end-of-file. The unused locals
	// (currentPair, priorityListElements) were dropped.
	while ( getline(inputfile, currentLine) )
	{
		if(currentLine != ""){
			boost::algorithm::trim(currentLine);
			TermDocIDPairList.push_back( currentLine );
		}
	}
	inputfile.close();

	if(TermDocIDPairList.size() == 0){
		// Second Log() argument is 'true' only here -- presumably a fatal/severity
		// flag (all other calls in this file pass false); confirm against Logger.
		GetDefaultLogger().Log("Load query term list is NOT Done", true);
	}
	else{
		GetDefaultLogger().Log(Stringify(TermDocIDPairList.size()) + " pairs have been loaded.", false);
	}
}

// Builds docLookUpDict, mapping docID ->
// "<WARCTRECID>_<sourceFileName>_<compressedFileID>_<beginPos>_<endPos>".
// The list of data file names is read from the aux file named by
// kAuxDocSpecifcPositionFolderFileList; each data file (resolved relative to
// kDocSpecifcPositionFolderPath) holds one whitespace-separated record per line:
//   docID WARCTRECID compressedFileID beginningPosition endingPosition ...
void LoadUpDocSpecifcPositionsPairs(map<string, string> &docLookUpDict) {
    cout << "Load Up Doc Specifc Positions Pairs()" << endl;
    string auxInputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kAuxDocSpecifcPositionFolderFileList));
    string basePath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kDocSpecifcPositionFolderPath));

    // First pass: collect the data file names from the aux file. (The original
    // code opened this file through two separate streams and left two more
    // stream objects unused; a single stream suffices.)
    string currentLine;
    ifstream auxInputFile(auxInputFileName.c_str());
    vector<string> fileNames;
    while ( getline(auxInputFile, currentLine) )
    {
        if(currentLine != ""){
            boost::algorithm::trim(currentLine);
            fileNames.push_back(currentLine);
        }
    }
    auxInputFile.close();

    // Second pass: parse every record of every data file into the dictionary.
    vector<string>::iterator fileNamesIterator;
    for(fileNamesIterator = fileNames.begin(); fileNamesIterator != fileNames.end(); fileNamesIterator++)
    {
        string dataInputFileName = basePath + *fileNamesIterator;
        ifstream dataInputFile(dataInputFileName.c_str());

        while ( getline(dataInputFile, currentLine) )
        {
            if(currentLine == ""){
                continue;
            }
            boost::algorithm::trim(currentLine);

            istringstream iss( currentLine );
            string docID;
            string WARCTRECID;
            string compressedFileID;
            string beginningPosition;
            string endingPosition;
            iss >> docID >> WARCTRECID >> compressedFileID >> beginningPosition >> endingPosition;

            // The source file name is embedded in the value so the compressed
            // file can later be located via fileLookUpDict (see
            // LoadUpCompressedFileIDs()).
            docLookUpDict[docID] = WARCTRECID + "_" + *fileNamesIterator + "_" + compressedFileID + "_" + beginningPosition + "_" + endingPosition;
        }
        dataInputFile.close();
    }
    cout << "Length of the docLookUpDict:" << docLookUpDict.size() << endl;
}





// Builds fileLookUpDict, mapping "<listFileName>_<fileID>" -> path of the
// compressed file. The list of data file names is read from the aux file named
// by kAuxCompressedFileIDFileList; each data file (resolved relative to
// kCompressedFileIDFilePath) holds one whitespace-separated "fileID filePath"
// pair per line.
void LoadUpCompressedFileIDs(map<string, string> &fileLookUpDict) {
    cout << "Load Up Compressed File IDs" << endl;
    string auxInputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kAuxCompressedFileIDFileList));
    string basePath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kCompressedFileIDFilePath));

    // First pass: collect the data file names from the aux file. (The original
    // code opened this file through two separate streams and left two more
    // stream objects unused; a single stream suffices.)
    string currentLine;
    ifstream auxInputFile(auxInputFileName.c_str());
    vector<string> fileNames;
    while ( getline(auxInputFile, currentLine) )
    {
        if(currentLine != ""){
            boost::algorithm::trim(currentLine);
            fileNames.push_back(currentLine);
        }
    }
    auxInputFile.close();

    // Second pass: parse every record of every data file into the dictionary.
    vector<string>::iterator fileNamesIterator;
    for(fileNamesIterator = fileNames.begin(); fileNamesIterator != fileNames.end(); fileNamesIterator++)
    {
        string dataInputFileName = basePath + *fileNamesIterator;
        ifstream dataInputFile(dataInputFileName.c_str());

        while ( getline(dataInputFile, currentLine) )
        {
            if(currentLine == ""){
                continue;
            }
            boost::algorithm::trim(currentLine);

            istringstream iss( currentLine );
            string fileID;
            string filePath;
            iss >> fileID >> filePath;

            // Key embeds the originating list file name so IDs from different
            // lists cannot collide (see LoadUpDocSpecifcPositionsPairs()).
            string keyString = *fileNamesIterator + "_" + fileID;
            fileLookUpDict[ keyString ] = filePath;
        }
        dataInputFile.close();
    }
    cout << "Length of the fileLookUpDict:" << fileLookUpDict.size() << endl;
}

// Loads one (trimmed) term per line from the file named by the configuration
// key kQueryTermsONLYInputfileName into 'terms'.
void LoadUpTerms(vector<string> &terms) {
    cout << "Load Up Terms updated on 20130805 night by Wei" << endl;
    string inputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermsONLYInputfileName));
    cout << "inputFileName:" << inputFileName << endl;

    string currentLine;
    ifstream inputfile(inputFileName.c_str());
    // Loop on getline() directly to avoid the spurious end-of-file iteration
    // the old while(inputfile.good()) form allowed.
    while ( getline(inputfile, currentLine) )
    {
        if(currentLine != ""){
            boost::algorithm::trim(currentLine);
            terms.push_back(currentLine);
        }
    }
    cout << "# of terms loaded:" << terms.size() << endl;
    inputfile.close();
}

// Loads one (trimmed) term per line from 'inputFileName' into 'terms',
// mapping each term to 1 (the map is effectively used as a membership set).
void LoadUpTerms(map<string,int> &terms, string inputFileName) {
    cout << "Load Up Terms updated on 20130911 morning by Wei at school." << endl;
    cout << "inputFileName: " << inputFileName << endl;

    string currentLine;
    ifstream inputfile(inputFileName.c_str());
    // Loop on getline() directly to avoid the spurious end-of-file iteration
    // the old while(inputfile.good()) form allowed.
    while ( getline(inputfile, currentLine) )
    {
        if(currentLine != ""){
            boost::algorithm::trim(currentLine);
            terms[currentLine] = 1;
        }
    }
    cout << "The size of the map<string,int> terms:" << terms.size() << endl;
    inputfile.close();
}



// Loads the full lexicon term list (one trimmed term per line) into queryTerms.
// NOTE(review): the input path is hard-coded to a developer-specific location;
// consider moving it into the configuration like the other loaders.
void LoadUpLexiconTermsWei20130213(vector<string> &queryTerms) {
    cout << "Load Up Lexicon Terms updated on 20130213" << endl;

    string inputFileName = "/home/diaosi/gov2ClearYourMindAndDoItAgain/lexiconTermsONLY.txt";
    cout << "inputFileName:" << inputFileName << endl;

    string currentLine;
    ifstream inputfile(inputFileName.c_str());
    // Loop on getline() directly to avoid the spurious end-of-file iteration
    // the old while(inputfile.good()) form allowed.
    while ( getline(inputfile, currentLine) )
    {
        if(currentLine != ""){
            boost::algorithm::trim(currentLine);
            queryTerms.push_back(currentLine);
        }
    }
    cout << "The length of the queryTerms is:" << queryTerms.size() << endl;
    inputfile.close();
}

// Converts a float to text using the stream's default (6 significant digit)
// formatting, e.g. 1.5f -> "1.5".
std::string make_the_value_into_string_format(float originalValue){
	  std::ostringstream formatter;
	  formatter << originalValue;
	  return formatter.str();
}

// Converts an int to its decimal text form, e.g. 42 -> "42".
std::string make_the_value_into_string_format(int originalValue){
	  std::ostringstream formatter;
	  formatter << originalValue;
	  return formatter.str();
}

void Cat() {
  string flagString = "";
  cout << "value 1: original toolkit function for supporting the commands --cat and --cat-term" << endl;
  cout << "value 2: (STOP USING THIS) ask for one docID as input, retrieve back the whole document and display on the screen effectively.(How to make use of the original web page is the next step. Updated 2012/06/27 by Wei)" << endl;
  cout << "value 3: (STOP USING THIS) this is currently supported for the machine learning research project for positions related features(comment on 2013/01/11: OLD)." << endl;
  cout << "value 4: (STOP USING THIS, USE OPTION 10 instead) ask for multiple query terms as input until BLANK encountered, retrieve back the corresponding inverted lists and apply to the build-in pruning policies(comment on 2013/01/11: maybe useful)." << endl;
  cout << "Value 5: output the lexicon term with the original index statistics.(updated by Wei:2012/10/06, this function can be used for the pruning, impact and layering project as well)" << endl;
  cout << "value 6: update the external index given the new score intermediate file( comment on 2013/01/11: whether we need this method is in doubt now)." << endl;
  cout << "value 7: (STOP USING THIS) for the Learning To Prune research project. Updated 2012/09/19 by Wei" << endl;
  cout << "value 8: (STOP USING THIS) the input will be some query terms, output the inverted list of those terms. Updated 2012/11/26 by Wei" << endl;
  cout << "value 9: the input will be a set of indexes, the output will be an aux file that contains two rows, first is the lexicon term, second the freq in the collection. Updated 2012/11/28 by Wei" << endl;
  cout << "value 10: apply ML technique and store the probability back into the external inverted external index. Updated by Wei 20130126" << endl;
  cout << "value 11: run a small python program in C++" << endl;
  cout << "value 12: (STOP USING THIS)output external sorting files for python external sorting. 2013/02/12" << endl;
  cout << "value 13: (STOP USING THIS, cause we already have another random sample method)build a score/probability histogram in main memory and output it to disk. Updated by Wei 2013/08/04" << endl;
  cout << "value 14: Universal cut applying to the original index. Updated by Wei 2013/02/16" << endl;
  cout << "value 15: (test playground)Load the query term probability distribution. Updated by Wei 2013/02/23" << endl;
  cout << "value 16: Given a set of queryTerms, sort the inverted index of each of them, and output the threshold of corresponding percentage cut for the pruning method TCP. Updated by Wei 2014/01/28 afternoon" << endl;
  cout << "value 17: (Currently NOT using)build the query view(QV), and put the counter info into the external index. Updated by Wei 2013/02/28" << endl;
  cout << "value 18: run a small program using the 'COMBINATION LIBRARY' developped at poly in C++" << endl;
  cout << "value 19: This is an overall phase2 information generation task.(Updated by Wei on 2013/09/26 morning)" << endl;
  cout << "value 20: (STOP USING THIS) (Prof said there is NO BIG DEAL)The invertion operation of the inverted index." << endl;
  cout << "value 21: This is task for getting the feature: posting_rank_in_the_doc(Updated by Wei 20130714)" << endl;
  cout << "value 22: This is task for generating the Xdoc value for each document in the collection(eg. gov2 dataset)" << endl;
  cout << "value 23: This is task for outputing the <trecID,docID> pair to screen based on the info from the doc basic/extended map(Updated by Wei 2013/08/05 night)" << endl;
  cout << "value 24: This is task for outputing the set of probabilies for the randomly selected postings of the gov2 index(Updated by Wei 2014/01/05 afternoon at school)" << endl;
  cout << "value 25: This is task for outputing the 25M documents and their # of postings formed in the index for each document(Updated by Wei 2013/08/07 night)" << endl;
  cout << "value 26: This is task for: The input is an OLD 32BIT lexicon, the output is an NEW 64BIT lexicon(Updated by Wei 2013/08/08 afternoon)" << endl;
  cout << "value 27: Compute the posting rank in list and store the posting rank in list in external index(memory mapped). Updated by Wei on 20130910 afternoon at school" << endl;
  cout << "value 28: Build a forward index including the value: posting_rank_in_list(currently in test). Updated by Wei on 20130910 night at school" << endl;
  cout << "value 29: Prototyping the posting oriented uniform pruning method. Updated by Wei on 2014/02/03 afternoon at school." << endl;
  cout << "value 30: A small example of showing how much memory it will take to load the longest list info into main memory. Updated by Wei on 2013/09/20 at school." << endl;
  cout << "value 31: [DECREPIT because we do NOT need this analysis any more SINCE 2013/10/22 morning]output the set of documents needed to be parsed given a set of randomly selected postings. For the analysis of the distribution of the maximum potential usefulness. Updated by Wei on 2013/09/25 night at school." << endl;
  cout << "value 32: Documents picking analysis. Whether this alg. is useful or NOT has NOT been tested yet. So, let's test it. Updated on 2013/10/22 morning" << endl;
  cout << "value 33: Test the fucking sort() OP. Updated on 2014/01/03 afternoon" << endl;
  cout << "value 34: (STOP USING THIS, and I might have found sth interesting already in the parsing pipeline)The document analysis process. The offline phases and Online process. Updated on 2014/02/04 afternoon" << endl;
  cout << "value 35: export the rank in the list for each posting from full index" << endl;
  cout << "Enter the function value[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35]:";
  getline(cin, flagString);
  boost::algorithm::trim(flagString);



  if (flagString == "1"){
	  IndexCat index_cat(command_line_args.index_files1);
	  index_cat.Cat(command_line_args.term, command_line_args.term_len);
  }
  else if (flagString == "2"){

	  command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/clueweb");

	  // Init some variables to support this operation.
	  map<string, string> docLookUpDict;
	  LoadUpDocSpecifcPositionsPairs(docLookUpDict);

	  map<string, string> fileLookUpDict;
	  LoadUpCompressedFileIDs(fileLookUpDict);

	  IndexCat index_cat(command_line_args.index_files1);
	  string previousCompressedFilePath = "";

	  while (true){
		  cout << "Enter docID[0,1022253]:";
		  string docIDSearchFor = "";
		  getline(cin, docIDSearchFor);
		  boost::algorithm::trim(docIDSearchFor);

		  int value = -1;

		  try
		  {
			  value = atoi(docIDSearchFor.c_str());
			  if ( value >= 0 && value <= 1022253){
				  index_cat.Cat(docIDSearchFor, docLookUpDict, fileLookUpDict,previousCompressedFilePath);
			  }
			  else{
				  cout << "The docID you entered is out of range. Please try again." << endl;
			  }
		  }
		  catch(int e)
		  {
			   cout << "The string you entered is NOT an legal number." << endl;
		  }
	  }
  }
  else if (flagString == "3"){
	  string outputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermDocIDPairPositionsOutputFile));

	  ofstream outputFileHandler(outputFileName.c_str());

	  vector<string> queryID_Term_docIDList;
	  LoadUpQueryTermDocIDPairList(queryID_Term_docIDList);

	  map<string, string> docLookUpDict;
	  LoadUpDocSpecifcPositionsPairs(docLookUpDict);

	  map<string, string> fileLookUpDict;
	  LoadUpCompressedFileIDs(fileLookUpDict);

	  string previousCompressedFilePath = "";


	  int counter = 0;
	  int numberOfDocNotFound = 0;

	  //init for the currentDocID,begin.
	  istringstream iss(queryID_Term_docIDList[0]);
      string queryNumber;
      string queryTerm;
      string docID = "";

	  iss >> queryNumber;
	  iss >> queryTerm;
	  iss >> docID;
	  string currentDocID = docID;
	  //init for the currentDocID,end.

	  vector<string> termNumberList;
	  for( unsigned int i = 0; i < queryID_Term_docIDList.size(); i++ ) {
		  //let's divide the term list based on docID.
		  istringstream iss(queryID_Term_docIDList[i]);
	      string queryNumber;
	      string queryTerm;
	      string docID = "";

		  iss >> queryNumber;
		  iss >> queryTerm;
		  iss >> docID;
		  if(currentDocID != docID){
			  //cat the things and init the whole things.
			  IndexCat index_cat(command_line_args.index_files1);
			  GetDefaultLogger().Log("Processing " + Stringify(termNumberList.size() ) + " Term, Query number pair in docID:" +  currentDocID, false);
			  //cout << "In:" << currentDocID << endl;
			  index_cat.Cat(flagString, outputFileHandler, termNumberList, currentDocID, docLookUpDict, fileLookUpDict,previousCompressedFilePath,numberOfDocNotFound);
			  //cout << "Out:" << currentDocID << endl;
			  currentDocID = "";
			  termNumberList.clear();

			  currentDocID = docID;
			  termNumberList.push_back(queryNumber + "_" + queryTerm);
		  }
		  else{
			  termNumberList.push_back(queryNumber + "_" + queryTerm);
		  }
		  counter ++;
		  cout << endl;
	  }
	  //wrap up. Process the final step.
	  IndexCat index_cat(command_line_args.index_files1);
	  GetDefaultLogger().Log("Processing " + Stringify(termNumberList.size() ) + " Term, Query number pair in docID:" +  currentDocID, false);
	  index_cat.Cat(flagString, outputFileHandler, termNumberList, currentDocID, docLookUpDict, fileLookUpDict,previousCompressedFilePath,numberOfDocNotFound);
	  currentDocID = "";
	  termNumberList.clear();

	  GetDefaultLogger().Log(Stringify(counter) + " pairs have been processed.", false);
	  GetDefaultLogger().Log(Stringify(numberOfDocNotFound) + " docs are not found.", false);

	  outputFileHandler.close();

  }
  else if (flagString == "4"){
	  // Mode "4": RETIRED single-machine pruning demo.
	  // The active body only warns the user; the original logic is preserved,
	  // disabled, in the block comment below for reference.
	  cout << "Updated by wei 2013/02/23" << endl;
	  cout << "This function has been very OLD and please do NOT use it anymore." << endl;
	  cout << "The logic has been completely commented out" << endl;
	  /*
	  string flagString2 = "";
	  cout << "value 1: Run build-in example with query terms provided in a plain file." << endl;
	  cout << "Enter the function value[1]:";


	  //for user to enter:
	  //getline(cin, flagString2);
	  //for debug:
	  flagString2 = "1";
	  cout << "You entered:" << flagString2 << endl;

	  boost::algorithm::trim(flagString2);


	  vector<string> queryTerms;

	  if (flagString2 == "1"){
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // 362.53 seconds for 23 query terms(without scores stored).
		  // 439.084 seconds for 23 query terms(with scores stored).

		  // Load the complete query terms for evaluation, for production but for test.
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  //string queryTest0 = "snuff";
		  //string queryTest1 = "snyder";
		  //string queryTest2 = "so";
		  //string queryTest3 = "soalr";
		  //string queryTest4 = "soap";

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  string queryTerm0 = "00000000000000000000"; //70 postings.
		  //string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  //queryTerms.push_back(queryTest0);
		  //queryTerms.push_back(queryTest1);
		  //queryTerms.push_back(queryTest2);
		  //queryTerms.push_back(queryTest3);
		  //queryTerms.push_back(queryTest4);

		  queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  queryTerms.push_back(queryTerm2);
		  //queryTerms.push_back(queryTerm3);
		  //queryTerms.push_back(queryTerm4);
		  //queryTerms.push_back(queryTerm5);

	  }
	  else if (flagString2 == "2"){
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }
	  else{
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }

	  // Actual doing the pruning here.
	  for(unsigned int i = 0; i < queryTerms.size(); i++)
	  {
		  cout << "query term " << i << ":"<< queryTerms[i] << endl;
	  }

	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/11
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  IndexCat index_cat(command_line_args.index_files1);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "wei_pruning_development_" + string(buff) + "-100%";


	  // Lots of common between pruning and layering project. From now, we use the class LayeredIndexGenerator to do pruning as well.
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  //TODO: ??? the output meta index, some numbers are not that consistent with the original one.
	  // The 1st argument is the vector queryTerms which have all the query terms ready for pruning.
	  // The 2ed argument is the bool argument of the debugFlag. True for opening the debugging mode, false for closing the debugging mode.
	  // The 3rd argument is the bool argument of the switch for storing the computed scores into external index. True for yes and false for no.
	  cout << "store the external score into the index set to false" << endl;
	  layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, false, false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  */
  }
  else if (flagString == "5"){
	  // Mode "5": generate auxiliary pruning info for the ClueWeb09
	  // category-B English non-positional index.
	  command_line_args.index_files1.SetDirectory("/data5/team/weijiang/compatibleIndexesWithIRTK/clueweb09/cluewebCategoryBEnglishNonPositional2");

	  // NOTE(review): index_cat looks unused after construction — kept because
	  // the IndexCat constructor may validate/open the index; confirm before removing.
	  IndexCat index_cat(command_line_args.index_files1);

	  // Timestamp the output prefix so repeated runs never collide.
	  time_t current_time = time(NULL);
	  char timestamp[20];
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "wei_pruning_development_" + string(timestamp) + "-90%";

	  // The layering machinery doubles as the pruning driver here.
	  LayeredIndexGenerator aux_info_generator(command_line_args.index_files1, output_index_prefix);
	  aux_info_generator.CreatePrunedIndexAuxInfo();
  }
  else if (flagString == "6"){
	    // Mode "6": refresh the on-disk external index for the GOV2 setup.
	    cout << "step1: Update the target external index " << endl;

	    // Point index_files1 at the target external index on disk.
	    command_line_args.index_files1 = ParseIndexName("completedQueryTermsForGOV2EfficiencyTaskWithWrongScoresStored4Part1");
	    command_line_args.index_files1.SetDirectory("/data5/team/weijiang/compatibleIndexesWithIRTK/gov2");

	    GetDefaultLogger().Log("Starting local query processor with index '" + command_line_args.index_files1.prefix() + "'.", false);

	    // Spin up a local query processor (external-index argument "auto") and
	    // let it rebuild its external index from the files on the hard drive.
	    LocalQueryProcessor external_index_updater(command_line_args.index_files1,
	                                               command_line_args.query_stop_words_list_file,
	                                               command_line_args.query_algorithm_local,
	                                               command_line_args.query_mode_local,
	                                               command_line_args.result_format_local,
	                                               "auto");
	    external_index_updater.UpdateExternalIndexFromHardDrive();
  }
  else if (flagString == "7"){
	    // Mode "7": RETIRED Learning-To-Prune training-file generator.
	    // The active body only warns the user; the original logic is preserved,
	    // disabled, in the block comment below for reference.
	    cout << "NO logic here, STOP using this function" << endl;
	    /*
	    cout << "for the Learning To Prune research project. Output a training file which contains both the good and bad training examples for weka.(head -1000 queries, top100 results with their postings. The same amount as the complementary.)" << endl;
	    command_line_args.index_files1 = ParseIndexName("completedQueryTermsForGOV2EfficiencyTaskWithWrongScoresStored4Part1");
	    command_line_args.index_files1.SetDirectory("/data5/team/weijiang/compatibleIndexesWithIRTK/gov2");
		GetDefaultLogger().Log("Starting local query processor with index '" + command_line_args.index_files1.prefix() + "'.", false);
	    LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
	                                     command_line_args.query_mode_local, command_line_args.result_format_local);
	    */
  }
  else if (flagString == "8"){
	  // Mode "8": RETIRED per-term partial-BM25 dump.
	  // The active body only warns the user; the original logic is preserved,
	  // disabled, in the block comment below for reference.
	  cout << "NO logic here, STOP using this function" << endl;
	  /*
	  cout << "The input will be a set of 1 term queries, the output will be a set of files, for each line, will have the following format: qid:term (docID1,partialBM25Score1),(docID2,partialBM25Score2) ..." << endl;
	  command_line_args.index_files1 = ParseIndexName("index");
	  command_line_args.index_files1.SetDirectory("/data/rkhmel01/ir_toolkit");
	  LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
									 command_line_args.query_mode_local, command_line_args.result_format_local);
	  */
  }
  else if (flagString == "9"){
	  // Mode "9": dump every lexicon entry of a compatible index together
	  // with its collection frequency.
	  cout << "The input will be some set of compatible indexes, the output will be a file containing all the lexicons with its lexicon_freq_in_collection." << endl;

	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // CreatePrunedIndexAuxInfo() is borrowed from the layering machinery;
	  // the output prefix is a required constructor argument but nothing is
	  // written under it in this mode, hence the placeholder name.
	  const string output_index_prefix = "wei_testing_NO_USE";
	  LayeredIndexGenerator lexicon_dumper(command_line_args.index_files1, output_index_prefix);
	  lexicon_dumper.CreatePrunedIndexAuxInfo();
  }
  else if (flagString == "10"){
	  // Mode "10": apply the machine-learned model and store each posting's
	  // probability/importance into the inverted external index. The actual
	  // work is done by LayeredIndexGenerator::CreatePrunedIndexForMultipleTerms.
	  cout << "apply ML technique and store the probability into the inverted external index";
	  std::cout << "Storing Procedure Begins..." << std::endl;

	  // ****************************************************************************************
	  // the original pruning logic

	  string flagString2 = "";
	  cout << "value 1: Run build-in example with query terms provided in a plain file." << endl;
	  cout << "Enter the function value[1]:";


	  //for user to enter:
	  //getline(cin, flagString2);
	  //for debug: sub-mode is hard-wired to "1", the interactive prompt above is cosmetic
	  flagString2 = "1";
	  cout << "You entered:" << flagString2 << endl;

	  boost::algorithm::trim(flagString2);

	  vector<string> queryTerms;
	  // NOTE(review): presumably the fraction of postings to keep (1.0 == keep
	  // everything) and a pruning-method selector consumed by
	  // CreatePrunedIndexForMultipleTerms — confirm the code meanings there.
	  float percentageToKeepOfTheWholeIndex = 1.0;
	  int pruningMethodCodeOfTheWholeIndex = 5;


	  if (flagString2 == "1"){
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // 362.53 seconds for 23 query terms(without scores stored).
		  // 439.084 seconds for 23 query terms(with scores stored).

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // string queryTest0 = "snuff";
		  // string queryTest1 = "snyder";
		  // string queryTest2 = "so";
		  string queryTest3 = "soalr";
		  // string queryTest4 = "soap";

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  // string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";
		  // string queryTerm9 = "1031b"; // 27 postings 30
		  // string queryTerm10 = "10cs"; // 19 postings 30

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);
		  // queryTerms.push_back(queryTerm9);
		  // queryTerms.push_back(queryTerm10);

	  }
	  else if (flagString2 == "2"){
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }
	  else{
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }



	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/24
	  // OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

	  // NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // NOTE(review): index_cat looks unused after construction — kept because
	  // the IndexCat constructor may have side effects; confirm before removing.
	  IndexCat index_cat(command_line_args.index_files1);

	  // Timestamped output prefix: "%Y-%m-%d-%H-%M-%S" fills exactly 19 chars
	  // plus the terminating NUL, so buff[20] is exactly large enough.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "wei_pruning_development_universal_cutting_" + string(buff) + "-" + make_the_value_into_string_format(percentageToKeepOfTheWholeIndex) + "-" + make_the_value_into_string_format(pruningMethodCodeOfTheWholeIndex);


	  // Lots of common between the pruning and layering project.
	  // From now, we use the class LayeredIndexGenerator to do pruning as well.
	  // Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  //TODO: ??? the output meta index, some numbers are not that consistent with the original one.
	  // The 1st argument is the vector queryTerms which have all the query terms ready for pruning.
	  // The 2ed argument is the bool argument of the debugFlag. True for opening the debugging mode, false for closing the debugging mode.
	  // The 3rd argument is the bool argument of the switch for storing the computed scores into external index. True for yes and false for no.
	  // No matter what, the external index will be generated.
	  // If the flag set to false, it will ONLY store (1)chunk max score and (2)block max score
	  // If the flag set to true, it will ALSO store (2) the aux info for each posting which is very useful in the context of pruning
	  // Updated by Wei 2013/01/26, I think I am almost there and just keep going.
	  // Notes: Let's try this today:2013/01/26
	  cout << "store the external score into the index set to be true" << endl;

	  // old version, prune based on each inverted list
	  // layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, false, true);

	  // current version, prune based on universal importance of this posting

	  map<string,float> queryTermsProbabilityDistributionMap;
	  LoadUpQueryTermsProbabilityDistribution(queryTermsProbabilityDistributionMap);

	  /*
	  for(map<string,float>::iterator queryTermsProbabilityDistributionMapIterator = queryTermsProbabilityDistributionMap.begin(); queryTermsProbabilityDistributionMapIterator != queryTermsProbabilityDistributionMap.end(); queryTermsProbabilityDistributionMapIterator++){
		  cout << queryTermsProbabilityDistributionMapIterator->first << " " << queryTermsProbabilityDistributionMapIterator->second << endl;
	  }
	  */


	  layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, true, true, percentageToKeepOfTheWholeIndex, pruningMethodCodeOfTheWholeIndex,queryTermsProbabilityDistributionMap);

	  // old and NOT flexible version, do NOT use anymore
	  // layered_index_generator.CreatePrunedIndexForMultipleTermsBasedOnUniversalScoreImportanceOLDAndNotUsed(queryTerms, false, true, percentageToKeepOfTheWholeIndex, pruningMethodCodeOfTheWholeIndex);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;

	  // ****************************************************************************************

	  std::cout << "Storing Procedure Ends." << std::endl;
  }
  else if (flagString == "11"){
	    // Mode "11": demo of embedding the Python 2 interpreter and calling
	    // functions from the module "pythonModuleForCallingFromC".
	    // Remember to issue the following command before calling your own python module.
	    // export PYTHONPATH=${PYTHONPATH}:./
	    PyObject *pName, *pModule, *pFunc;
	    PyObject *pArgs, *pValue;

	    Py_Initialize();

	    // The module is looked up on PYTHONPATH by name, not by filesystem path.
	    pName = PyString_FromString("pythonModuleForCallingFromC");

	    /* Error checking of pName left out */

	    pModule = PyImport_Import(pName);
	    Py_DECREF(pName);

	    // Sanity check that the embedded interpreter is alive.
		PyRun_SimpleString("from time import time,ctime\n"
						   "print 'Today is',ctime(time())\n");

	    if (pModule != NULL) {
	    	// test1: call the function pythonModuleForCallingFromC_hello
	        pFunc = PyObject_GetAttrString(pModule, "pythonModuleForCallingFromC_hello");
	        // pFunc is a new reference

	        if (pFunc && PyCallable_Check(pFunc)) {
	            pArgs = PyTuple_New(0);
	            pValue = PyObject_CallObject(pFunc, pArgs);
	            Py_DECREF(pArgs);
	            if (pValue != NULL) {
	                Py_DECREF(pValue);
	            }
	            else {
	                // BUG FIX: the old error path also did Py_DECREF(pFunc) and
	                // Py_DECREF(pModule) here without returning. Combined with the
	                // Py_XDECREF(pFunc) below and the final Py_DECREF(pModule),
	                // that double-decref'd pFunc and left test2 operating on a
	                // freed pModule. Just report the error; the references are
	                // released exactly once further down.
	                PyErr_Print();
	                fprintf(stderr,"Call failed\n");
	            }
	        }
	        else {
	            if (PyErr_Occurred())
	                PyErr_Print();
	            fprintf(stderr, "Cannot find the function");
	        }
	        Py_XDECREF(pFunc);

	        // test2: call the function pythonModuleForCallingFromC_loadTheAuxInfoIntoMemory
	        pFunc = PyObject_GetAttrString(pModule, "pythonModuleForCallingFromC_loadTheAuxInfoIntoMemory");
	        // pFunc is a new reference

	        if (pFunc && PyCallable_Check(pFunc)) {
	            pArgs = PyTuple_New(0);
	            pValue = PyObject_CallObject(pFunc, pArgs);
	            Py_DECREF(pArgs);
	            if (pValue != NULL) {
	                printf("Result of call: %ld\n", PyInt_AsLong(pValue));
	                Py_DECREF(pValue);
	            }
	            else {
	                // BUG FIX: same double-decref/use-after-free problem as in
	                // test1 (see the comment there); only report the failure.
	                PyErr_Print();
	                fprintf(stderr,"Call failed\n");
	            }
	        }
	        else {
	            if (PyErr_Occurred())
	                PyErr_Print();
	            fprintf(stderr, "Cannot find the function");
	        }
	        Py_XDECREF(pFunc);

	        // (test3/test4 — loading one term's postings and ranking a trecID via
	        // ..._loadTheActualDataIntoMemory and ..._assignTheRank — were
	        // commented-out experiments; removed for clarity, see VCS history.)

	        // final step: release the module exactly once
	        Py_DECREF(pModule);
	    }
	    else {
	        PyErr_Print();
	        fprintf(stderr, "Failed to load the module");
	    }
	    Py_Finalize();

  }
  else if (flagString == "12"){
	  // Mode "12": write per-term external score files that a separate Python
	  // external-sorting step consumes. Terms come from the lexicon loader below.
	  cout << "output external sorting files for python external sorting" << endl;
	  cout << "Storing Procedure Begins..." << std::endl;

	  // ****************************************************************************************
	  // the original pruning logic

	  // NOTE(review): presumably selects how postings are scored/sorted inside
	  // CreateExternalScoreFileForEachQueryTerm — confirm code meanings there.
	  int sortingMethodCodeForTheTerm = 4;

	  string flagString2 = "";
	  cout << "value 1: Run build-in example with query terms provided in a plain file." << endl;
	  cout << "Enter the function value[1]:";


	  //for user to enter:
	  //getline(cin, flagString2);
	  //for debug: sub-mode is hard-wired to "1", the interactive prompt above is cosmetic
	  flagString2 = "1";
	  cout << "You entered:" << flagString2 << endl;

	  boost::algorithm::trim(flagString2);

	  vector<string> queryTerms;

	  if (flagString2 == "1"){
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // 362.53 seconds for 23 query terms(without scores stored).
		  // 439.084 seconds for 23 query terms(with scores stored).

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);


		  LoadUpLexiconTermsWei20130213(queryTerms);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  /*
		  string queryTest0 = "snuff";	// 3854 postings.
		  string queryTest1 = "snyder";	// 47802 postings.
		  string queryTest2 = "so";	// 3219389 postings.
		  string queryTest3 = "soalr";	// 10 postings.
		  string queryTest4 = "soap";	// 175292 postings.

		  string queryTest5 = "0";	// 8400333 postings.
		  string queryTest6 = "113";	// 599688 postings.
		  string queryTest7 = "2";	// 10966214 postings.
		  */



		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  // string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  /*
		  queryTerms.push_back(queryTest0);
		  queryTerms.push_back(queryTest1);
		  queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  queryTerms.push_back(queryTest4);

		  queryTerms.push_back(queryTest5);
		  queryTerms.push_back(queryTest6);
		  queryTerms.push_back(queryTest7);
		  */

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);

	  }
	  else if (flagString2 == "2"){
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }
	  else{
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }

	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/24
	  // OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

	  // NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // NOTE(review): index_cat looks unused after construction — kept because
	  // the IndexCat constructor may have side effects; confirm before removing.
	  IndexCat index_cat(command_line_args.index_files1);

	  // Timestamped prefix; the generator requires one even though this mode
	  // only emits score files (hence the "NO_USE" marker).
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";
	  // Lots of common between the pruning and layering project.
	  // From now, we use the class LayeredIndexGenerator to do pruning as well.
	  // Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
	  Timer pruning_time;
	  layered_index_generator.CreateExternalScoreFileForEachQueryTerm(queryTerms, false, sortingMethodCodeForTheTerm);
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Storing Procedure Ends." << std::endl;
  }
  else if (flagString == "13"){
	  // Mode "13": build an in-memory score/probability histogram over a
	  // sampled set of terms and write it to disk via CreateHistogram.
	  cout << "build a score/probability histogram in main memory and output it to disk" << endl;
	  cout << "(1) I only do the histogram based on a certain set of terms(sampling)(select some terms that can represent the whole lexicon). Currently, I just select some query terms from the head 95K training query log" << endl;
	  cout << "(2) I can use the in memory histogram to count things (Proposed by Prof)" << endl;
	  cout << "Building score/value Histogram Begins..." << std::endl;

	  // The meaning of different sortingMethodCodeForTheTerm
	  // (needed to be filled completely)
	  // sortingMethodCodeForTheTerm = 1
	  // sortingMethodCodeForTheTerm = 2
	  // sortingMethodCodeForTheTerm = 3
	  // sortingMethodCodeForTheTerm = 4
	  // sortingMethodCodeForTheTerm = 5 Using our current MLed model
	  int sortingMethodCodeForTheTerm = 5;

	  vector<string> terms;
	  // Load Up some terms into the variable queryTerms
	  LoadUpTerms(terms);

	  // updated by Wei 2013/02/13 add more typical terms to do test
	  // string queryTest0 = "snuff";	// 3854 postings.
	  // string queryTest1 = "snyder";	// 47802 postings.
	  // string queryTest2 = "so";	// 3219389 postings.
	  // string queryTest3 = "soalr";	// 10 postings.
	  // string queryTest4 = "soap";	// 175292 postings.

	  // string queryTest5 = "0";	// 8400333 postings.
	  // string queryTest6 = "113";	// 599688 postings.
	  // string queryTest7 = "2";	// 10966214 postings.




	  // 00000000000000000000 0000000000000000000

	  // ideal for debugging, mark1
	  // string queryTerm0 = "00000000000000000000"; //70 postings.
	  // string queryTerm1 = "0"; //8400333 postings.

	  // ideal for debugging, mark2
	  // string queryTerm2 = "0000000000000000000"; //46 postings.
	  //string queryTerm3 = "000000000000000000"; //82 postings.
	  //string queryTerm4 = "0000000000000000"; //251 postings.
	  //string queryTerm5 = "00000000000000000"; //87 postings.

	  // ideal for debugging, mark3, 2 postings
	  // string queryTerm6 = "000sites";

	  // ideal for debugging, mark4
	  // string queryTerm7 = "00wc";
	  // string queryTerm8 = "03255";


	  // queryTerms.push_back(queryTest0);
	  // queryTerms.push_back(queryTest1);
	  // queryTerms.push_back(queryTest2);
	  // queryTerms.push_back(queryTest3);
	  // queryTerms.push_back(queryTest4);

	  // queryTerms.push_back(queryTest5);
	  // queryTerms.push_back(queryTest6);
	  // queryTerms.push_back(queryTest7);


	  // queryTerms.push_back(queryTerm0);
	  // queryTerms.push_back(queryTerm1);
	  // queryTerms.push_back(queryTerm2);
	  // queryTerms.push_back(queryTerm3);
	  // queryTerms.push_back(queryTerm4);
	  // queryTerms.push_back(queryTerm5);
	  // queryTerms.push_back(queryTerm6);
	  // queryTerms.push_back(queryTerm7);
	  // queryTerms.push_back(queryTerm8);

	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/24
	  // OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

	  // NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");

	  // For the machine pangolin:
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
	  // For the machine dodo:
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // OLD 32bit version in order to prune the original index
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

	  // ready to DELETE on 2013/08/05 night
	  // IndexCat index_cat(command_line_args.index_files1);

	  // OLD version, dumpped on 2013/08/04 afternoon by Wei
	  // LoadUpQueryTermsProbabilityDistribution_Advance(queryTermsTrueProbabilityDistributionMap,queryTerms1DProbabilityDistributionMap,queryTerms2DProbabilityDistributionMap,queryTermsGoodTuringProbabilityDistributionMap);

	  // Timestamped prefix; required by the generator ctor but unused by this
	  // mode (hence the "NO_USE" marker).
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  // Preload the ML feature values and both probability-factor aux maps the
	  // MLed model (sorting code 5) reads during CreateHistogram.
	  layered_index_generator.LoadUpThreeFeatureValuesForMachineLearnedTraining();

	  // Updated by Wei 2013/08/05 night
	  // for the first probability factor
	  // aux maps for the first probability factor
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();
	  // for the second probablity factor
	  // aux maps for the second probablity factor
	  layered_index_generator.LoadUpAuxFilesForSecondProbabilityFactor();


	  // Updated by Wei 2013/08/04 afternoon.
	  // I personally think that this function CreateHistogram do NOT need to take too long.
	  // Meaning do not need to take too much terms into consideration. Usually, tens of terms will be fine and do not need to take half a day to do this

	  // current version
	  layered_index_generator.CreateHistogram(terms, true, sortingMethodCodeForTheTerm);

	  // OLD version, dumpped on 2013/08/04 afternoon by Wei
	  // layered_index_generator.CreateHistogram(terms, true, sortingMethodCodeForTheTerm,queryTermsTrueProbabilityDistributionMap,queryTerms1DProbabilityDistributionMap,queryTerms2DProbabilityDistributionMap,queryTermsGoodTuringProbabilityDistributionMap);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Building score/value Histogram Ends." << std::endl;
  }
  else if (flagString == "14"){
	    // Branch "14": prune ("cut") an index down to a given fraction of its
	    // postings, ranking postings by a universal importance score.  The actual
	    // pruning is delegated to
	    // LayeredIndexGenerator::CutBasedOnUniversalImportanceScore below.
	    cout << "real cut to the original index by universal importance score" << endl;
	    cout << "universal cut begins..." << endl;

		// Terms whose inverted lists take part in the cut.  Production runs load
		// full query/lexicon term sets via the commented-out loaders below; this
		// configuration uses a single tiny list ("soalr", 10 postings) for testing.
		vector<string> queryTerms;
		// Fraction of the whole index to keep (1.0 == keep everything).
		float percentageToKeepOfTheWholeIndex = 1.0;
		// Selector for the pruning strategy; the semantics of code 2 live inside
		// CutBasedOnUniversalImportanceScore -- TODO confirm against its declaration.
		int pruningMethodCodeOfTheWholeIndex = 2;
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // NONE

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // Load the complete query terms for evaluation, for production
		  // LoadUpLexiconTermsWei20130213(queryTerms);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  // string queryTest0 = "snuff";	// 3854 postings.
		  // string queryTest1 = "snyder";	// 47802 postings.
		  // string queryTest2 = "so";	// 3219389 postings.
		  string queryTest3 = "soalr";	// 10 postings.
		  // string queryTest4 = "soap";	// 175292 postings.

		  // string queryTest5 = "0";	// 8400333 postings.
		  // string queryTest6 = "113";	// 599688 postings.
		  // string queryTest7 = "2";	// 10966214 postings.

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  //string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTest5);
		  // queryTerms.push_back(queryTest6);
		  // queryTerms.push_back(queryTest7);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);



		// option1: months ago
		// command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

		// option2: updated 2013/01/24
		// OLD 32bit version
		// command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

		// NEW 64bit version
		command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
		// for the machine pangolin
		command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
		// for the machine dodo
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// OLD 32bit version in order to prune the original index
		// option1 (32bit lexicon with the original index):
		// command_line_args.index_files1 = ParseIndexName("index");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

		// option2 (32bit lexicon only the query terms):
		// command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-27-21-47-55-100%");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// NOTE(review): index_cat is constructed but never referenced again in this
		// branch; presumably the IndexCat constructor has side effects -- confirm.
		IndexCat index_cat(command_line_args.index_files1);

		// Timestamp the output prefix so repeated runs do not collide.  buff holds
		// exactly 19 formatted characters plus the terminating NUL.
		time_t now;
		now = time(NULL);
		char buff[20];
		strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));

		const string output_index_prefix = "wei_pruning_development_universal_cutting_" + string(buff) + "-" + make_the_value_into_string_format(percentageToKeepOfTheWholeIndex) + "-" + make_the_value_into_string_format(pruningMethodCodeOfTheWholeIndex);
		// Lots of common between the pruning and layering project.
		// From now, we use the class LayeredIndexGenerator to do pruning as well.
		// Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
		LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
		Timer pruning_time;
		// Arguments: (terms, debug flag, store-into-external-index flag, fraction
		// to keep, pruning-method code) -- the two bool flags follow the convention
		// documented elsewhere in this file; TODO confirm against the declaration.
		layered_index_generator.CutBasedOnUniversalImportanceScore(queryTerms, false, true, percentageToKeepOfTheWholeIndex, pruningMethodCodeOfTheWholeIndex);
		GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
		cout << "The output_index_prefix: " << output_index_prefix << endl;
	    cout << "universal cut ends." << endl;
  }
  else if (flagString == "15"){
	  // Branch "15": load the query-term probability distribution and dump every
	  // (term, probability) pair to stdout for manual inspection.
	  cout << "Load the query term probability distribution begins..." << endl;

	  // Term -> probability mass, populated by the project-level loader.
	  map<string,float> termProbabilities;
	  LoadUpQueryTermsProbabilityDistribution(termProbabilities);

	  // Print one "<term> <probability>" line per entry, in map (lexicographic) order.
	  typedef map<string,float>::const_iterator TermProbIter;
	  for (TermProbIter entry = termProbabilities.begin(); entry != termProbabilities.end(); ++entry) {
		  cout << entry->first << " " << entry->second << endl;
	  }

	  cout << "Load the query term probability distribution ends." << endl;
  }
  else if (flagString == "16"){
	    // Branch "16": for each given query term, sort its inverted list and emit
	    // the score threshold corresponding to a percentage cut, as needed by the
	    // TCP (term-centric pruning) method.  Work is delegated to
	    // CreateCutThresholdOfEachTermBasedOnPercentageForMultipleTerms below.
	    cout << "Given a set of queryTerms, sort the inverted index of each of them, and output the threshold of corresponding percentage cut for the pruning method TCP." << endl;
	    cout << "procedure begins..." << endl;
		// map used as a set: a value of 1 marks the term as selected.
		map<string,int> queryTerms;
		// Updated by Wei on 2014/01/26 night at school
		queryTerms["cults"] = 1;
		queryTerms["doomsday"] = 1;
		queryTerms["freighter"] = 1;

		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // NONE

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // Load the complete query terms for evaluation, for production
		  // LoadUpLexiconTermsWei20130213(queryTerms);

		  // Load some terms into the vector and given the file name
		  // for the machine pangolin
		  // string fileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermsONLYInputfileName));
		  // for the machine dodo
		  // string fileName = "/home/diaosi/gov2ClearYourMindAndDoItAgain/queryTermsFor100KQueriesTail100";
		  // LoadUpTerms(queryTerms,fileName);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  // string queryTest0 = "snuff";	// 3854 postings.
		  // string queryTest1 = "snyder";	// 47802 postings.
		  // string queryTest2 = "so";	// 3219389 postings.
		  // string queryTest3 = "soalr";	// 10 postings.
		  // string queryTest4 = "soap";	// 175292 postings.

		  // string queryTest5 = "0";	// 8400333 postings.
		  // string queryTest6 = "113";	// 599688 postings.
		  // string queryTest7 = "2";	// 10966214 postings.

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  // string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  // queryTerms[queryTest3] = 1;
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTest5);
		  // queryTerms.push_back(queryTest6);
		  // queryTerms.push_back(queryTest7);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);



		// option1: months ago
		// command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

		// option2: updated 2013/01/24
		// OLD 32bit version
		// command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

		// 64bit index lexicon for all terms
		// command_line_args.index_files1 = ParseIndexName("LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None");
		// command_line_args.index_files1.SetDirectory("/data/obukai/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

		// NEW 64bit version
		command_line_args.index_files1 = ParseIndexName("LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None");
		// for the machine pangolin
		// command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
		// for the machine dodo
		command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// OLD 32bit version in order to prune the original index
		// option1 (32bit lexicon with the original index):
		// command_line_args.index_files1 = ParseIndexName("index");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

		// option2 (32bit lexicon only the query terms):
		// command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-27-21-47-55-100%");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// NOTE(review): index_cat is constructed but never referenced again in this
		// branch; presumably the IndexCat constructor has side effects -- confirm.
		IndexCat index_cat(command_line_args.index_files1);

		// Timestamp for the output prefix; buff holds exactly 19 chars + NUL.
		time_t now;
		now = time(NULL);
		char buff[20];
		strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));

		const string output_index_prefix = "wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";
		// Lots of common between the pruning and layering project.
		// From now, we use the class LayeredIndexGenerator to do pruning as well.
		// Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
		LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
		Timer pruning_time;

		// Second argument is presumably a debug flag (false == production mode) --
		// TODO confirm against the declaration.
		layered_index_generator.CreateCutThresholdOfEachTermBasedOnPercentageForMultipleTerms(queryTerms, false);

		GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
		cout << "The output_index_prefix: " << output_index_prefix << endl;

	    cout << "procedure ends." << endl;
  }
  else if (flagString == "17"){
	  // Branch "17": placeholder for building the query view (QV).  It only
	  // announces begin/end; no actual work is performed here.
	  cout << "build the query view" << endl;
	  cout << "Building the query view(QV) Procedure Begins..." << endl;

	  // NOTE(review): the original author (2013/02/28) questioned whether this is
	  // the right place to build the query view and suggested doing it from the
	  // query processor instead.  No implementation has landed in this branch.

	  cout << "Building the query view(QV) Procedure Ends." << endl;
  }
  else if (flagString == "18"){
	  // Branch "18": small demo of the combinations routine -- enumerate every
	  // r-element subset of {0, 1, ..., n-1} and print it.
	  cout << "run a program using the boost combinations library(made by OUR UNIVERSITY)" << endl;

	  const int r = 2;
	  const int n = 4;

	  // Seed the working vector with the identity sequence 0..n-1.
	  std::vector<int> elements(n);
	  for (int idx = 0; idx < n; ++idx) {
	      elements[idx] = idx;
	  }

	  int combinationCount = 0;
	  do {
	      ++combinationCount;
	      // Elide the middle of a long listing: show the first 9 combinations and
	      // anything beyond the 117th, with a "..." marker at position 10.
	      if (combinationCount < 10 || combinationCount > 117) {
	          std::cout << "[ " << elements[0];
	          for (int pos = 1; pos < r; ++pos) {
	              std::cout << ", " << elements[pos];
	          }
	          std::cout << " ]" << std::endl;
	      } else if (combinationCount == 10) {
	          std::cout << "  . . ." << std::endl;
	      }
	  } while (next_combination(elements.begin(), elements.begin() + r, elements.end()));

	  std::cout << "Found " << combinationCount << " combinations of size " << r << " without repetitions"
	            << " from a set of " << n << " elements." << std::endl;

  }
  else if (flagString == "19"){
	  // Branch "19": parse the document collections listed on stdin and extract
	  // auxiliary information (document edges / positions) for phase-2 pruning.
	  cout << "(1) Given a set of documents(sorted by their trecIDs) (done)"
			  "(2) Decompress the corresponding compress file into main memory (done)"
			  "(3) Extract the corresponding edges of the specified web pages (done)"
			  "(4) Dump this set of edges into disk (done)"
			  "(5) Record the beginning position and the ending position of each document in the compress file(NOT done)" << endl;



	  // step2: decompress the specific file and get the information
	  GetDefaultLogger().Log("Document collection information extraction...", false);

	  // Get collection indexer in order to do indexing on the specific documents.
	  CollectionIndexer& collection_indexer = GetCollectionIndexer();

	  // Input to the indexer is a list of document collection files we want to index in order.
	  // Deal with the input file.
	  // The cin object will automatically connect with the command line arguments. "< fileName"
	  collection_indexer.ProcessDocumentCollections(cin);


	  // Start timing indexing process.
	  Timer index_time;

	  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
	  collection_indexer.ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning();

	  // for debug ONLY
	  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
	  // End timing indexing process.
  }
  else if (flagString == "20"){
	    // Branch "20": invert the GOV2 inverted index into a forward index that
	    // preserves the same information.  Marked by the original author as a
	    // dead end -- kept for reference but not to be developed further.
	    // (Typos in the user-facing messages below were fixed in review:
	    // "shit to" -> "shift to", "BAND" -> "BRAND", "invertion" -> "inversion",
	    // "THe" -> "The".)
	    cout << "Updated by Wei 2013/03/21" << endl;
	    cout << "Cause Prof think this is just a small part, let's shift to a more fast and flexible way of solving this problem." << endl;
	    cout << "PLEASE DO NOT DEVELOP THIS FUNCTION ANY MORE. TRY TO FIND A BRAND NEW WAY TO DO THIS" << endl;

	    cout << "The inversion of the original gov2 inverted index" << endl;
	    cout << "The input will be the original gov2 inverted index" << endl;
	    cout << "The output will be the forward index, maintaining the same info as the inverted index" << endl;
	    cout << "forward index building begins..." << endl;

		// Terms whose lists are inverted; this run uses one tiny debugging term.
		vector<string> queryTerms;
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // NONE

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // Load the complete query terms for evaluation, for production
		  // LoadUpLexiconTermsWei20130213(queryTerms);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  // string queryTest0 = "snuff";	// 3854 postings.
		  // string queryTest1 = "snyder";	// 47802 postings.
		  // string queryTest2 = "so";	// 3219389 postings.
		  string queryTest3 = "soalr";	// 10 postings.
		  // string queryTest4 = "soap";	// 175292 postings.

		  // string queryTest5 = "0";	// 8400333 postings.
		  // string queryTest6 = "113";	// 599688 postings.
		  // string queryTest7 = "2";	// 10966214 postings.

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  //string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTest5);
		  // queryTerms.push_back(queryTest6);
		  // queryTerms.push_back(queryTest7);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);



		// option1: months ago
		// command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

		// option2: updated 2013/01/24
		// OLD 32bit version
		// command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

		// NEW 64bit version
		command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
		// for the machine pangolin
		command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
		// for the machine dodo
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// OLD 32bit version in order to prune the original index
		// option1 (32bit lexicon with the original index):
		// command_line_args.index_files1 = ParseIndexName("index");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

		// option2 (32bit lexicon only the query terms):
		// command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-27-21-47-55-100%");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// NOTE(review): index_cat is constructed but never referenced again in this
		// branch; presumably the IndexCat constructor has side effects -- confirm.
		IndexCat index_cat(command_line_args.index_files1);

		// Timestamp for the output prefix; buff holds exactly 19 chars + NUL.
		time_t now;
		now = time(NULL);
		char buff[20];
		strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));

		const string output_index_prefix = "wei_pruning_development_forward_index_" + string(buff);
		// Lots of common between the pruning and layering project.
		// From now, we use the class LayeredIndexGenerator to do pruning as well.
		// Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
		LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
		Timer pruning_time;
		// The following are the arguments for the BuildForwardIndex(...) function
		// argument1: vector<string> & queryTerms,
		// argument2: bool debugFlag,
		// argument3: bool store_computed_score_into_external_index_flag
		layered_index_generator.BuildForwardIndex(queryTerms, false, true);
		GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
		cout << "The output_index_prefix: " << output_index_prefix << endl;
		cout << "forward index building ends." << endl;
  }
  else if (flagString == "21"){
	      // Branch "21": parse the document collections listed on stdin and record
	      // the rank-in-document feature for each posting, then report collection
	      // statistics.  (The pipeline is shared with branch "22".)
	      cout << "Getting the rank in the doc feature" << endl;
		  cout << "(1) Given a set of documents(sorted by their trecIDs) (done)"
				  "(2) Decompress the corresponding compress file into main memory (done)"
				  "(3) Record and sign the posting rank in the doc (need to be updated by Wei 2013/07/14)" << endl;

		  // step2: decompress the specific file and get the information
		  GetDefaultLogger().Log("Document collection information extraction...", false);

		  // Get collection indexer in order to do indexing on the specific documents.
		  CollectionIndexer& collection_indexer = GetCollectionIndexer();

		  // Input to the indexer is a list of document collection files we want to index in order.
		  // Deal with the input file.
		  // The cin object will automatically connect with the command line arguments. "< fileName"
		  collection_indexer.ProcessDocumentCollections(cin);


		  // Start timing indexing process.
		  Timer index_time;

		  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
		  collection_indexer.ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning();

		  // for debug ONLY
		  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
		  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
		  // End timing indexing process.


		  // document_collections_doc_id_ranges_filename is declared elsewhere in this file.
		  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

		  uint64_t posting_count = GetPostingCollectionController().posting_count();

		  cout << "Collection Statistics:\n";
		  cout << "total posting count: " << posting_count << "\n";
		  cout << "total number of documents indexed: " << collection_indexer.doc_id() << endl;

  }
  else if (flagString == "22"){
	      // Branch "22": generate the Xdoc value for every document in the
	      // collection (e.g. GOV2).  NOTE(review): the body is identical to branch
	      // "21"; the behavioral difference, if any, presumably lives inside
	      // ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning -- confirm.
	      cout << "This is the task for generating the Xdoc value for each document in the collection(like in gov2 dataset)" << endl;

		  // step2: decompress the specific file and get the information
		  GetDefaultLogger().Log("Document collection information extraction...", false);

		  // Get collection indexer in order to do indexing on the specific documents.
		  CollectionIndexer& collection_indexer = GetCollectionIndexer();

		  // Input to the indexer is a list of document collection files we want to index in order.
		  // Deal with the input file.
		  // The cin object will automatically connect with the command line arguments. "< fileName"
		  collection_indexer.ProcessDocumentCollections(cin);


		  // Start timing indexing process.
		  Timer index_time;

		  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
		  collection_indexer.ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning();

		  // for debug ONLY
		  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
		  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
		  // End timing indexing process.


		  // document_collections_doc_id_ranges_filename is declared elsewhere in this file.
		  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

		  uint64_t posting_count = GetPostingCollectionController().posting_count();

		  cout << "Collection Statistics:\n";
		  cout << "total posting count: " << posting_count << "\n";
		  cout << "total number of documents indexed: " << collection_indexer.doc_id() << endl;

  }
  else if (flagString == "23"){
	  // Branch "23": dump (trecID, docID, document size in words) for the pruned
	  // 64-bit index to the screen.
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // Hard-coded index location on the machine "pangolin".
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // Timestamp the (marked NO_USE) output prefix; timeText holds 19 chars + NUL.
	  time_t timestamp = time(NULL);
	  char timeText[20];
	  strftime(timeText, 20, "%Y-%m-%d-%H-%M-%S", localtime(&timestamp));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(timeText) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);
	  layered_index_generator.OutputTrecIDAndDocIDAndDocSizeInWordsToScreen();
  }
  else if (flagString == "24"){
	  // Branch "24": load probability tables, randomly selected postings and
	  // feature values, then produce the probability for each selected posting
	  // (baseline variant).  The load order below is sequential state building on
	  // the generator; reordering these calls has not been verified as safe.
	  cout << "Producing the set of probability for the randomly selected postings begins..." << std::endl;

	  // 64bit index lexicon for ONLY query terms
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // 32bit index lexicon for all terms
	  // Updated by Wei 2013/08/30 morning at school, usually, this is for applying to the whole lexicon that you can use the following original index
	  // Accordingly, the index_reader should be changed to 32bit as well.
	  command_line_args.index_files1 = ParseIndexName("index");
	  // The following is for dodo machine:
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");
	  // The following is for pangolin machine:
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  // Timestamp for the (marked NO_USE) output prefix; buff holds 19 chars + NUL.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  // debug mark1:
	  // cout << "Passed mark 1" << endl;
	  // exit(1);

	  Timer duration_time;

	  // Updated by Wei 2013/08/28 night
	  // for 1D
	  // layered_index_generator.LoadUpTheCombinationOfSecondANDThirdFactorProbabilityTable1D();
	  // Updated by Wei 2013/12/06 night
	  // Updated by Wei 2013/12/21 night
	  // Updated by Wei 2014/01/05 afternoon
	  // for the following methods
	  // (1)(impact score, list length) and (2)(quad tree, list length) and (3) part of the (relrank, list length)
	  layered_index_generator.LoadUpTheCombinationOfSecondANDThirdFactorProbabilityTable2D();
	  // Updated by Wei 2014/01/05 afternoon (newly added)
	  layered_index_generator.LoadUpTermPieceInfoForRelRank();

	  // Updated by Wei 2013/12/21 night
	  // for 2D quadTree (STOP construction cause I do NOT whether it can also be compatible with the existing code)
	  // layered_index_generator.LoadUpTheCombinationOfSecondANDThirdFactorProbabilityTable2D_quadTree();


	  // Updated by Wei 2013/08/06 night
	  // Updated by Wei 2013/12/06 night
	  layered_index_generator.LoadUpRandomlySelectedPostings();

	  layered_index_generator.LoadUpThreeFeatureValuesForMachineLearnedTraining();
	  // for the first probability factor
	  // aux maps for the first probability factor

	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();

	  // for the second probablity factor
	  // aux maps for the second probablity factor
	  // I don't think we need this load at this moment.
	  // Updated by Wei on 2013/12/06 night at school
	  // layered_index_generator.LoadUpAuxFilesForSecondProbabilityFactor();

	  // Updated by Wei on 2013/12/09 night at school
	  // layered_index_generator.LoadTermsWithTheirLengthOfList();

	  // Used since 2013/08/28 by Wei to have the global threshold for the simplified 3 factor probability formula
	  // Updated by Wei 2013/12/09 night by Wei at school
	  // Updated by Wei 2014/01/03 afternoon by Wei at school
	  layered_index_generator.ProduceProbabilitiesForRandomlySelectedPostingsBaseline();

	  // Used a month ago by Wei to have the global threshold for our 3 factor probability formula
	  // layered_index_generator.ProduceProbabilitiesForRandomlySelectedPostings();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Producing the set of probability for the randomly selected postings ends." << std::endl;
  }
  else if (flagString == "25"){
	  // Branch "25": for each of the ~25M GOV2 documents, output how many of its
	  // postings appear in the original (32-bit) index, restricted to the loaded
	  // term selection.  ("Outputing" typo fixed to "Outputting" in review.)
	  cout << "Outputting the 25M documents and with their num of postings formed in the index begins..." << std::endl;

	  // 64bit
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // 32bit
	  command_line_args.index_files1 = ParseIndexName("index");
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  // Timestamp for the (marked NO_USE) output prefix; buff holds 19 chars + NUL.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;

	  // Load the term selection first; the per-document counting depends on it.
	  layered_index_generator.LoadUpSelectedTerms();
	  layered_index_generator.OutputingEachDocumentWithTheirNumOfPostingsInIndex();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Outputting the 25M documents and with their num of postings formed in the index ends." << std::endl;
  }
  else if (flagString == "26"){
	  // Branch "26": convert the OLD 32-bit lexicon of the original GOV2 index
	  // into the NEW 64-bit lexicon format.  The conversion itself lives in
	  // LayeredIndexGenerator::ConvertingOLD32BitLexiconToNEW64BitLexicon.
	  cout << "Input: an OLD 32BIT lexicon; Output: a NEW 64BIT lexicon begins..." << endl;

	  // 64Bit
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // 32Bit
	  command_line_args.index_files1 = ParseIndexName("index");
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  // Timestamp for the (marked NO_USE) output prefix; buff holds 19 chars + NUL.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;

	  layered_index_generator.ConvertingOLD32BitLexiconToNEW64BitLexicon();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()) + " seconds.", false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;


	  cout << "Input: an OLD 32BIT lexicon; Output: a NEW 64BIT lexicon ends." << endl;
  }
  else if (flagString == "27"){
	  cout << "compute the posting rank in list and store the rank in the external inverted index" << endl;
	  cout << "Storing Procedure Begins..." << endl;

	  // if there are some terms in queryTermsDictForDebugging, THEN it is in debug mode
	  // if there are NOTHING in queryTermsDictForDebugging(size==0), THEN it is in production mode
	  map<string,int> queryTermsDictForDebugging;
	  // string tempInputFileName = "/data3/obukai/workspace/web-search-engine-wei/polyIRIndexer/tempSetOfTermsForDebuggingMultipleExternalIndexAccess_head_9";
	  // LoadUpTerms(queryTermsDictForDebugging,tempInputFileName);



	  // string queryTest0 = "snuff";
	  // string queryTest1 = "snyder";
	  // string queryTest2 = "so";
	  string queryTest3 = "soalr";
	  // string queryTest4 = "soap";

	  // 00000000000000000000 0000000000000000000

	  // ideal for debugging, mark1
	  // string queryTerm0 = "00000000000000000000"; //70 postings.
	  // string queryTerm1 = "0"; //8400333 postings.

	  // ideal for debugging, mark2
	  // string queryTerm2 = "0000000000000000000"; //46 postings.
	  //string queryTerm3 = "000000000000000000"; //82 postings.
	  //string queryTerm4 = "0000000000000000"; //251 postings.
	  //string queryTerm5 = "00000000000000000"; //87 postings.

	  // ideal for debugging, mark3, 2 postings
	  // string queryTerm6 = "000sites";

	  // ideal for debugging, mark4
	  // string queryTerm7 = "00wc";
	  // string queryTerm8 = "03255";
	  // string queryTerm9 = "1031b"; // 27 postings 30
	  // string queryTerm10 = "10cs"; // 19 postings 30

	  // queryTermsDictForDebugging[queryTest0] = 1;
	  // queryTermsDictForDebugging[queryTest1] = 1;
	  // queryTermsDictForDebugging[queryTest2] = 1;
	  queryTermsDictForDebugging[queryTest3] = 1;
	  // queryTermsDictForDebugging[queryTest4] = 1;

	  // queryTerms.push_back(queryTerm0);
	  // queryTerms.push_back(queryTerm1);
	  // queryTermsDictForDebugging[queryTerm2] = 1;
	  // queryTerms.push_back(queryTerm3);
	  // queryTerms.push_back(queryTerm4);
	  // queryTerms.push_back(queryTerm5);
	  // queryTerms.push_back(queryTerm6);
	  // queryTerms.push_back(queryTerm7);
	  // queryTerms.push_back(queryTerm8);
	  // queryTerms.push_back(queryTerm9);
	  // queryTerms.push_back(queryTerm10);

	  // Updated on 2013/09/12 afternoon by Wei at school
	  // option1: OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // Path for the server Pangolin
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");
	  // Path for the server DODO
	  // command_line_args.index_files1.SetDirectory("N/A");

	  // option2: NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // Path for the server Pangolin
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
	  // Path for the server DODO
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // NO USE maybe(20130912 night)
	  // IndexCat index_cat(command_line_args.index_files1);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";


	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  //TODO: ??? the output meta index, some numbers are not that consistent with the original one.
	  // The 1st argument is the vector queryTerms which have all the query terms ready for pruning.
	  // The 2ed argument is the bool argument of the debugFlag. True for opening the debugging mode, false for closing the debugging mode.
	  // The 3rd argument is the bool argument of the switch for storing the computed scores into external index. True for yes and false for no.
	  // No matter what, the external index will be generated.
	  // If the flag set to false, it will ONLY store (1)chunk max score and (2)block max score
	  // If the flag set to true, it will ALSO store (2) the aux info for each posting which is very useful in the context of pruning
	  // Updated by Wei 2013/01/26, I think I am almost there and just keep going.
	  // Notes: Let's try this today:2013/01/26
	  cout << "store the external score into the index set to be true" << endl;

	  // old version, prune based on each inverted list
	  // layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, false, true);

	  // I do NOT know the usage of this variable
	  map<string,float> queryTermsProbabilityDistributionMap;
	  layered_index_generator.StorePostingRankInListToExternalIndex(queryTermsDictForDebugging, true);
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Storing Procedure Ends." << endl;
  }
  else if (flagString == "28"){
	  cout << "Build a forward index including the value: posting_rank_in_list" << endl;
	  // get the rank as fast as I can. The set of terms pretending to be the document1 is:
	  // total length is 9: {all, feedback, consolidated, sources, planning, including, questions, find, looking}

      // index path options:
	  // current set of index in test
	  string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-13-38-39_None_None";
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-10-20-32-07_None_None"; // This set of indexes has passed the test
	  string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
      command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  // some computation methods needs this info, so load it. So just do NOT need this, can just comment this out
	  // LoadUpQueryTermsProbabilityDistribution(queryTermsProbabilityDistributionMap);

      LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
                                     command_line_args.query_mode_local, command_line_args.result_format_local);
  }
  else if (flagString == "29"){
	  cout << "Prototyping the posting oriented uniform pruning method." << endl;
	  // Step1: the index will be used for loading the postings
	  // Option1: The below index contains ONLY the query terms, the # of terms are: 38449 (a little smaller than the actual # of query terms (38871) in 100K queries)
	  // (Used for debugging)
	  // string indexName = "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%";
	  string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // Option2: The below index contains ALL terms in the lexicon
	  // (Used for production)
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25";
	  // string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
      command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  // Updated by Wei 2013/09/30 morning at school
	  // Updated by Wei 2014/02/03 afternoon at school
	  layered_index_generator.LoadUpFinalTOP10DocumentResultRelatedPostings();

	  // Updated by Wei 2013/09/17 night at school
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();

	  // Updated by Wei 2013/09/13 afternoon
	  // Need to load up the probability table first in order to do the posting oriented pruning
	  layered_index_generator.LoadUpProbabilityTableBasedOnListLengthANDRelativeRank();

	  // Updated by Wei 2013/09/21 night
	  // Need to load up the <docID,# of postings recorded> in order to allocate the memory :)
	  // Use 60Secs to load all the things into main memory
	  // Debug, do NOT used. Production, please used.
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();

	  // Updated by Wei 2013/09/15 afternoon at school
	  // Need to load up the <termID,term> pair in order to for pruning
	  layered_index_generator.LoadUpTermIDANDTermPairs();

	  // Updated by Wei 2013/09/22 afternoon at school
	  // This function needs to be called after the above functions have been called.
	  layered_index_generator.buildTermIDWithTheirFirstFactorProbabilityMap();

	  // exit(1);

	  Timer processing_time;
	  // Currently used and optimized by Wei on 2013/09/21 afternoon at school
	  // optimized version
	  // Why so so SLOW when doing the step4???
	  // Because I need to pass the map by reference, using the & at declaration time.
	  // Solved by Wei on 2013/09/22 night at school
	  layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethodOptimizedVersion(false);
	  // un-optimized version
	  // layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethod(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;

  }
  else if (flagString == "30"){
	  cout << "This example shows how much memory will be used for the longest list." << endl;
	  // Step1: the index will be used for loading the postings
	  // Option1: The below index contains ONLY the query terms, the # of terms are: 38449 (a little smaller than the actual # of query terms (38871) in 100K queries)
	  // (Used for debugging)
	  string indexName = "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%";
	  // Option2: The below index contains ALL terms in the lexicon
	  // (Used for production)
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
      command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;

	  // Currently used and optimized by Wei on 2013/09/20 afternoon at school
	  layered_index_generator.ComputeHowMuchMemoryWillBeUsed();

	  // NOT used but leave as an reference since 2013/09/20 morning by Wei at school
	  // layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethod(false);
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "31"){
	  cout << "NOT yet filled begins..." << std::endl;

	  // 64bit index lexicon for ONLY query terms
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // 32bit index lexicon for all terms (preferred)
	  // Updated by Wei 2013/08/30 morning at school, usually, this is for applying to the whole lexicon that you can use the following original index
	  // Accordingly, the index_reader should be changed to 32bit as well.
	  command_line_args.index_files1 = ParseIndexName("index");
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;

	  // Updated by Wei 2013/08/06 night
	  layered_index_generator.LoadUpRandomlySelectedPostings();

	  // Used since 2013/09/24 night by Wei at school to output a set of documents needed to be parsed given a set of postings as input
	  layered_index_generator.OutputASetOfDocumentsNeededToBeParsedGivenASetOfPostingsAsInput();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "NOT yet filled ends." << std::endl;
  }
  else if (flagString == "32"){
	  cout << "Documents picking analysis. Updated by Wei on 2013/10/22 morning at school." << endl;
	  cout << "uncompress 1 gzipped file" << endl;

	  // Step1: the index will be used for loading the postings
	  // Option1: The below index contains ONLY the query terms, the # of terms are: 38449 (a little smaller than the actual # of query terms (38871) in 100K queries)
	  // (Used for debugging)
	  // sub-option1:
	  // string indexName = "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%";
	  // sub-option2:
	  // string indexName = "index";
	  // Option2: The below index contains ALL terms in the lexicon
	  // (Used for production)
	  string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // sub-option1:
	  string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
      // sub-option2:
	  // string indexPath = "/data3/obukai/workspace/web-search-engine-wei/polyIRIndexer/";
	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  // Updated by Wei 2013/11/05 afternoon at school
	  // This load is for evaluation purpose
	  // Updated by Wei 2013/10/30 morning at school
	  // I need to load this file in order to do the end-to-end evaluation
	  // Updated by Wei 2013/09/19 afternoon at school
	  layered_index_generator.LoadUpFinalTOP10DocumentResultRelatedPostings();

	  // Updated by Wei 2013/11/05 afternoon at school
	  // This load is for computing the P(t) probability of the formula
	  // Updated by Wei 2013/10/22 morning at school
	  // This is needed.
	  // Updated by Wei 2013/09/17 night at school
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();

	  // Updated by Wei 2013/11/05 afternoon at school
	  // This load is to LOAD UP the P(TOP) probability of the formula. We of course can change it to a bucket style
	  // Updated by Wei 2013/10/22 morning at school
	  // This is needed.
	  // Updated by Wei 2013/09/13 afternoon
	  // Need to load up the probability table first in order to do the posting oriented pruning
	  layered_index_generator.LoadUpProbabilityTableBasedOnListLengthANDRelativeRank();

	  // Updated by Wei 2013/11/05 afternoon at school
	  // This load should be varied based on HOW MANY postings you want to process
	  // Updated by Wei 2013/10/22 morning at school
	  // A switch should be implemented for PRODUCTION mode and DEBUGGING mode.
	  // This is needed
	  // Updated by Wei 2013/09/21 night
	  // Need to load up the <docID,# of postings recorded> in order to allocate the memory :)
	  // Use 60Secs to load all the things into main memory
	  // Debug, do NOT used. Production, please used.
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();

	  // Updated by Wei 2013/11/05 afternoon at school
	  // This load should be varied based on HOW MANY terms existed in the index
	  // Updated by Wei 2013/10/22 morning at school
	  // This is needed
	  // Updated by Wei 2013/09/15 afternoon at school
	  // Need to load up the <termID,term> pair in order to for pruning
	  layered_index_generator.LoadUpTermIDANDTermPairs();

	  // Updated by Wei 2013/11/05 afternoon at school
	  // key: termID
	  // value: first probability
	  // Updated by Wei 2013/10/22 morning at school
	  // This is needed
	  // Updated by Wei 2013/09/22 afternoon at school
	  // This function needs to be called after the above functions have been called.
	  layered_index_generator.buildTermIDWithTheirFirstFactorProbabilityMap();

	  Timer processing_time;
	  // Currently used and optimized by Wei on 2013/10/28 afternoon at school
	  // optimized version
	  // Why so so SLOW when doing the step4???
	  // Because I need to pass the map by reference, using the & at declaration time.
	  // Solved by Wei on 2013/09/22 night at school
	  layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethodOptimizedVersion(false);
	  // un-optimized version
	  // layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethod(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "33"){
	  cout << "Test the sort(...) OP begins..." << std::endl;

	  // 64bit index lexicon for ONLY query terms
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // 32bit index lexicon for all terms
	  // Updated by Wei 2013/08/30 morning at school, usually, this is for applying to the whole lexicon that you can use the following original index
	  // Accordingly, the index_reader should be changed to 32bit as well.
	  command_line_args.index_files1 = ParseIndexName("index");
	  // The following is for dodo machine:
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");
	  // The following is for pangolin machine:
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  // debug mark1:
	  // cout << "Passed mark 1" << endl;
	  // exit(1);

	  Timer duration_time;

	  // Updated by Wei 2013/08/28 night
	  // for 1D
	  // layered_index_generator.LoadUpTheCombinationOfSecondANDThirdFactorProbabilityTable1D();
	  // Updated by Wei 2013/12/06 night
	  // Updated by Wei 2013/12/21 night
	  // for 2D and the quadTree (maybe compatible with the relrank as well?)
	  layered_index_generator.LoadUpTheCombinationOfSecondANDThirdFactorProbabilityTable2D();
	  // Updated by Wei 2013/12/21 night
	  // for 2D quadTree (STOP construction cause I do NOT whether it can also be compatible with the existing code)
	  // layered_index_generator.LoadUpTheCombinationOfSecondANDThirdFactorProbabilityTable2D_quadTree();


	  // Updated by Wei 2013/08/06 night
	  // Updated by Wei 2013/12/06 night
	  layered_index_generator.LoadUpRandomlySelectedPostings();

	  layered_index_generator.LoadUpThreeFeatureValuesForMachineLearnedTraining();
	  // for the first probability factor
	  // aux maps for the first probability factor

	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();

	  // for the second probablity factor
	  // aux maps for the second probablity factor
	  // I don't think we need this load at this moment.
	  // Updated by Wei on 2013/12/06 night at school
	  // layered_index_generator.LoadUpAuxFilesForSecondProbabilityFactor();

	  // Updated by Wei on 2013/12/09 night at school
	  // layered_index_generator.LoadTermsWithTheirLengthOfList();

	  // Used since 2013/08/28 by Wei to have the global threshold for the simplified 3 factor probability formula
	  // Updated by Wei 2013/12/09 night by Wei at school
	  layered_index_generator.SortTest();

	  // Used a month ago by Wei to have the global threshold for our 3 factor probability formula
	  // layered_index_generator.ProduceProbabilitiesForRandomlySelectedPostings();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Test the sort(...) OP ends." << std::endl;
  }
  else if (flagString == "34"){
	  cout << "flagString: " << flagString << " NOT supported."<< endl;
	  exit(1);
  }
  else if (flagString == "35"){
	  cout << "output the 'rank in the list' of each posting begins..." << std::endl;

	  // 64bit index lexicon for ONLY query terms
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");

	  // 64bit index lexicon for all terms
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None");
	  // The following is for dodo machine:
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25/");
	  // The following is for pangolin machine:
	  // N/A

	  // 32bit index lexicon for all terms
	  // Updated by Wei 2013/08/30 morning at school, usually, this is for applying to the whole lexicon that you can use the following original index
	  // Accordingly, the index_reader should be changed to 32bit as well.
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // The following is for dodo machine:
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");
	  // The following is for pangolin machine:
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;

	  layered_index_generator.OutputRankInListForEachPosting();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "output the 'rank in the list' of each posting ends." << std::endl;
  }
  else{
	  cout << "System do NOT recognize the flag value." << endl;
	  exit(1);
  }
}



void Diff() {
  IndexDiff index_diff(command_line_args.index_files1, command_line_args.index_files2);
  index_diff.Diff(command_line_args.term, command_line_args.term_len);
}




void Index() {
  GetDefaultLogger().Log("Indexing document collection...", false);

  // Get collection indexer in order to do indexing on the specific documents.
  CollectionIndexer& collection_indexer = GetCollectionIndexer();

  // Input to the indexer is a list of document collection files we want to index in order.
  // Deal with the input file.
  collection_indexer.ProcessDocumentCollections(cin);


  // Start timing indexing process.
  Timer index_time;

  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
  collection_indexer.ParseDocumentCollections();

  // for debug ONLY
  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
  // End timing indexing process.


  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

  uint64_t posting_count = GetPostingCollectionController().posting_count();

  cout << "Collection Statistics:\n";
  cout << "total posting count: " << posting_count << "\n";
  cout << "total number of documents indexed: " << collection_indexer.doc_id() << endl;
}

IndexFiles ParseIndexName(const char* index_name) {
  // Parse an index name of the form "name" or "name:GROUP.FILE", where GROUP
  // and FILE are integers. A bare name yields IndexFiles(name); the extended
  // form yields IndexFiles(name, group, file). A malformed extended form is
  // logged as an error and a default-constructed IndexFiles is returned.
  assert(index_name != NULL);

  const char* colon = strchr(index_name, ':');
  if (colon == NULL) {
    // No group/file suffix: the whole string is the index name.
    return IndexFiles(index_name);
  }

  // Require at least one character between ':' and '.', and at least one
  // character after the '.'.
  const char* dot = strchr(colon + 1, '.');
  const bool well_formed = (dot != NULL) && ((dot - colon) > 1) && (strlen(dot + 1) > 0);
  if (!well_formed) {
    GetErrorLogger().Log("Invalid index name specified on command line.", true);
    return IndexFiles();
  }

  const int group_num = atoi(colon + 1);
  const int file_num = atoi(dot + 1);
  return IndexFiles(string(index_name, (colon - index_name)), group_num, file_num);
}

void SetConfigurationOption(string key_value) {
  // Apply a "key=value" option to the global configuration and echo whether
  // it replaced an existing entry or added a new one. Input without '=' is
  // silently ignored, matching prior behavior.
  const size_t sep = key_value.find('=');
  if (sep == string::npos)
    return;

  const string key = key_value.substr(0, sep);
  const string value = key_value.substr(sep + 1);
  // Renamed from 'override' (a contextual keyword in C++11+) for clarity.
  const bool replaced = Configuration::GetConfiguration().SetKeyValue(key, value);
  cout << key << " = " << value << (replaced ? " (override)" : " (add)") << endl;
}

void UnrecognizedOptionValue(const char* option_name, const char* option_value) {
  // Report an option whose value we could not interpret, then terminate.
  // Fixed: exit with a non-zero status so callers/scripts can detect the
  // failure — this is an error path, and the unrecognized-flag branch of the
  // dispatch code already uses exit(1). Previously this exited with 0
  // (success). Also streams the C strings directly instead of building
  // throwaway std::string temporaries.
  cout << "Option '" << option_name << "' has an unrecognized value of '" << option_value << "'" << endl;
  exit(1);
}

// SIGINT handler installed while running in indexing mode.
// NOTE(review): the graceful-shutdown path below is commented out, so the
// handler currently terminates the process immediately without flushing any
// partially built index state — confirm this is intentional before relying
// on clean interruption of an indexing run.
void SignalHandlerIndex(int sig) {
/*  GetDefaultLogger().Log("Received termination request. Cleaning up now...", false);

  CollectionIndexer& collection_indexer = GetCollectionIndexer();
  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

  PostingCollectionController& posting_collection_controller = GetPostingCollectionController();
  // FIXME: It's possible that the parser callback will call this simultaneously as we're cleaning up.
  //        Set some special variable in class that's feeding the parser to indicate it to finish up.
  posting_collection_controller.Finish();*/

  // 'sig' is deliberately unused; exit immediately with status 0.
  exit(0);
}



// TODO: Proper cleanup needed, depending on what mode the program is running in. Delete incomplete indices, etc. Be careful about overwriting indices.
// TODO: Proper cleanup needed, depending on what mode the program is running in. Delete incomplete indices, etc. Be careful about overwriting indices.
void InstallSignalHandler() {
  // Register the SIGINT handler appropriate for the mode the program was
  // started in: a dedicated handler for indexing, the default disposition
  // for everything else.
  struct sigaction action;
  action.sa_flags = 0;
  // Block further SIGINTs while the handler itself is running.
  sigemptyset(&action.sa_mask);
  sigaddset(&action.sa_mask, SIGINT);

  if (command_line_args.mode == CommandLineArgs::kIndex) {
    action.sa_handler = SignalHandlerIndex;
  } else {
    action.sa_handler = SIG_DFL;
  }

  sigaction(SIGINT, &action, 0);
}

void Layerify() {
  GetDefaultLogger().Log("Creating layered index...", false);
  const char* output_index_prefix = (command_line_args.output_index_prefix != NULL ? command_line_args.output_index_prefix : "index_layered");
  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
  Timer layering_time;
  cout << "Wei modified version in(only first 10 terms)" << endl;
  layered_index_generator.CreateLayeredIndex();
  cout << "Wei modified version out" << endl;
  GetDefaultLogger().Log("Time Elapsed: " + Stringify(layering_time.GetElapsedTime()), false);
}

void Prune() {
  cout << "This function is no longer available, please use the --cat command with option value 4 instead. Updated by Wei: 2012/07/26" << endl;
}

void Init() {
  // One-time process setup: hook SIGINT handling for the current run mode.
  InstallSignalHandler();

#ifndef NDEBUG
  // Remind the operator that this is a debug (assert-enabled) build.
  std::cout << "Compiled with assertions enabled.\n" << std::endl;
#endif
}

// Program entry point. Parses the command-line options into the global
// 'command_line_args' structure, interprets the remaining positional
// arguments according to the selected mode (index names and/or an output
// index prefix), then dispatches to the routine for the selected role/mode.
int main(int argc, char** argv) {
  // Parse argv into the data structure 'command_line_args' ONLY.
  const char* opt_string = "ibslmqcdh";
  const struct option long_opts[] = { // Index the document collection bundles.
                                      { "index", no_argument, NULL, 'i' },

                                      // The whole system will act as an distributed information broker.
                                      { "broker", no_argument, NULL, 'b' },

                                      // The whole system will act as an distributed slave infomation provider.
                                      { "slave", no_argument, NULL, 's' },

                                      // The whole system will act independetly.
                                      { "local", no_argument, NULL, 'l' },

                                      // Merge the indices generated during the indexing step.
                                      { "merge", no_argument, NULL, 'm' },

                                      // Override the default merge degree.
                                      { "merge-degree", required_argument, NULL, 0 },

                                      // Specify the files to merge and their resulting index names on stdin.
                                      { "merge-input", no_argument, NULL, 0 },

                                      // Query an index.
                                      { "query", no_argument, NULL, 'q' },

                                      // Set which query algorithm we want to use.
                                      { "query-algorithm", required_argument, NULL, 0 },

                                      // Set which query mode we want to use.
                                      { "query-mode", required_argument, NULL, 0 },

                                      // Use the following stop word list at query time.
                                      { "query-stop-list-file", required_argument, NULL, 0 },

                                      // Set which result format we want to use.
                                      { "result-format", required_argument, NULL, 0 },

                                      // Outputs inverted list data in a human readable format.
                                      { "cat", no_argument, NULL, 'c' },

                                      // Specify the inverted list (term) on which we want to run the cat procedure.
                                      { "cat-term", required_argument, NULL, 0 },

                                      // Outputs the differences between two inverted lists.
                                      { "diff", no_argument, NULL, 'd' },

                                      // Specify the inverted list (term) on which we want to run the diff procedure.
                                      { "diff-term", required_argument, NULL, 0 },

                                      // Remaps an index. The argument specifies the document mapping file to use for the remap procedure.
                                      { "remap", required_argument, NULL, 0 },

                                      // Creates a layered index.
                                      { "layerify", no_argument, NULL, 0 },

                                      // prune operation.
                                      { "prune", no_argument, NULL, 0 },

                                      // Retrieves index data for an inverted list into an in-memory array. See function 'RetrieveIndexData()'.
                                      { "retrieve-index-data", required_argument, NULL, 0 },

                                      // Loops over an inverted list (decompresses but does not do any top-k). Useful for benchmarking decompression coders.
                                      { "loop-over-index-data", required_argument, NULL, 0 },

                                      // Loads the index into main memory.
                                      { "in-memory-index", no_argument, NULL, 0 },

                                      // Memory maps the index into our address space.
                                      { "memory-map-index", no_argument, NULL, 0 },

                                      // Builds an in-memory block level index.
                                      { "block-level-index", no_argument, NULL, 0 },

                                      // Loads and uses the external index during query processing. Some query algorithms require it.
                                      // TODO: Currently not used. Algorithms that require it automatically load the external index.
                                      { "use-external-index", no_argument, NULL, 0 },

                                      // Generates a docID mapping file (docIDs are remapped by URL) that can be used as input to the remap procedure.
                                      { "generate-url-sorted-doc-mapping", required_argument, NULL, 0 },

                                      // Overrides/adds options defined in the configuration file.
                                      { "config-options", required_argument, NULL, 0 },

                                      // Runs compression tests on some randomly generated data.
                                      { "test-compression", no_argument, NULL, 0 },

                                      // Tests a specific coder.
                                      { "test-coder", required_argument, NULL, 0 },

                                      // Print help information.
                                      { "help", no_argument, NULL, 'h' },

                                      // Terminate options list.
                                      { NULL, no_argument, NULL, 0 } };

  int opt, long_index;
  while ((opt = getopt_long(argc, argv, opt_string, long_opts, &long_index)) != -1) {
    switch (opt) {
      case 'l':
        command_line_args.role = CommandLineArgs::kLocal;
        break;

      case 'i':
        command_line_args.mode = CommandLineArgs::kIndex;
        break;

      case 'm':
        command_line_args.mode = CommandLineArgs::kMergeInitial;
        break;

      case 'q':
        command_line_args.mode = CommandLineArgs::kQuery;
        break;

      case 'c':
        command_line_args.mode = CommandLineArgs::kCat;
        break;

      case 'd':
        command_line_args.mode = CommandLineArgs::kDiff;
        break;

      case 'h':
        Help();
        return EXIT_SUCCESS;

      case 0:
        // Process options which do not have a short arg.
        if (strcmp("merge-degree", long_opts[long_index].name) == 0) {
          command_line_args.merge_degree = atoi(optarg);
        } else if (strcmp("merge-input", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kMergeInput;
        } else if (strcmp("query-algorithm", long_opts[long_index].name) == 0) {
          // Map the option value string onto the LocalQueryProcessor
          // algorithm enum; any other value aborts with a diagnostic.
          if (strcmp("default", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDefault;
          else if (strcmp("daat-and", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDaatAnd;
          else if (strcmp("daat-or", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDaatOr;
          else if (strcmp("taat-or", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kTaatOr;
          else if (strcmp("dual-layered-overlapping-daat", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredOverlappingDaat;
          else if (strcmp("dual-layered-overlapping-merge-daat", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredOverlappingMergeDaat;
          else if (strcmp("multi-layered-daat-or", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kMultiLayeredDaatOr;
          else if (strcmp("multi-layered-daat-or-max-score", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kMultiLayeredDaatOrMaxScore;
          else if (strcmp("layered-taat-or-early-terminated", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kLayeredTaatOrEarlyTerminated;
          else if (strcmp("wand", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kWand;
          else if (strcmp("dual-layered-wand", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredWand;
          else if (strcmp("max-score", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kMaxScore;
          else if (strcmp("dual-layered-max-score", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredMaxScore;
          else if (strcmp("daat-and-top-positions", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDaatAndTopPositions;
          else
            UnrecognizedOptionValue(long_opts[long_index].name, optarg);
        } else if (strcmp("query-mode", long_opts[long_index].name) == 0) {
          // Map the option value string onto the query-mode enum.
			if (strcmp("interactive", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kInteractive;
			else if (strcmp("interactive-single", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kInteractiveSingle;
			else if (strcmp("getPostingRankInListInteractive", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kGetPostingRankInListInteractive;
			else if (strcmp("bm25wsep", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kSpecialBM25wsep;
			else if (strcmp("special1", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kSpecialPurpose1;
			else if (strcmp("batch", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kBatch;
			else if (strcmp("getPostingRankInListBatch", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kGetPostingRankInListBatch;
			else if (strcmp("batch-bench", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kBatchBench;
			else
			  UnrecognizedOptionValue(long_opts[long_index].name, optarg);
        } else if (strcmp("query-stop-list-file", long_opts[long_index].name) == 0) {
          command_line_args.query_stop_words_list_file = optarg;
        } else if (strcmp("result-format", long_opts[long_index].name) == 0) {
          // Map the option value string onto the result-format enum.
			if (strcmp("trec", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kTrec;
			else if (strcmp("compare", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kCompare;
			else if (strcmp("discard", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kDiscard;
			else if (strcmp("pruning", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kPruning;
			else
			  UnrecognizedOptionValue(long_opts[long_index].name, optarg);
        } else if (strcmp("remap", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kRemap;
          command_line_args.doc_mapping_file = optarg;
        } else if (strcmp("layerify", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kLayerify;
        } else if (strcmp("prune", long_opts[long_index].name) == 0) {
            command_line_args.mode = CommandLineArgs::kPrune;
        } else if (strcmp("cat-term", long_opts[long_index].name) == 0 || strcmp("diff-term", long_opts[long_index].name) == 0) {
          // 'cat-term' and 'diff-term' share the same storage; the mode
          // (kCat vs. kDiff) decides how the term is used later.
          command_line_args.term_len = strlen(optarg);
          command_line_args.term = optarg;
        } else if (strcmp("retrieve-index-data", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kRetrieveIndexData;
          command_line_args.term_len = strlen(optarg);
          command_line_args.term = optarg;
        } else if (strcmp("loop-over-index-data", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kLoopOverIndexData;
          command_line_args.term_len = strlen(optarg);
          command_line_args.term = optarg;
        } else if (strcmp("in-memory-index", long_opts[long_index].name) == 0) {
          command_line_args.in_memory_index = true;
          SetConfigurationOption(string(config_properties::kMemoryResidentIndex) + string("=true"));
        } else if (strcmp("memory-map-index", long_opts[long_index].name) == 0) {
          command_line_args.memory_mapped_index = true;
          SetConfigurationOption(string(config_properties::kMemoryMappedIndex) + string("=true"));
        } else if (strcmp("block-level-index", long_opts[long_index].name) == 0) {
          SetConfigurationOption(string(config_properties::kUseBlockLevelIndex) + string("=true"));
        } else if (strcmp("use-external-index", long_opts[long_index].name) == 0) {
          command_line_args.use_external_index = true;
        } else if (strcmp("generate-url-sorted-doc-mapping", long_opts[long_index].name) == 0) {
          // NOTE(review): placeholder — prints a message instead of invoking
          // the actual GenerateUrlSortedDocIdMappingFile(optarg) routine.
          cout << "GenerateUrlSortedDocIdMappingFile(optarg) called" << endl;
          return EXIT_SUCCESS;
        } else if (strcmp("config-options", long_opts[long_index].name) == 0) {
        	OverrideConfigurationOptions(optarg);
        } else if (strcmp("test-compression", long_opts[long_index].name) == 0) {
            // NOTE(review): placeholder — prints instead of running TestCompression().
            cout << "TestCompression() called" << endl;

          return EXIT_SUCCESS;
        } else if (strcmp("test-coder", long_opts[long_index].name) == 0) {
            // NOTE(review): placeholder — prints instead of running TestCoder(optarg).
            cout << "TestCoder(optarg) called" << endl;

          return EXIT_SUCCESS;
        }
        break;

      default:
        // NOTE(review): the short options 'b' (broker) and 's' (slave) appear
        // in 'opt_string' and 'long_opts' but have no case above, so they land
        // here and terminate the program — presumably those roles are
        // unimplemented (the Role enum only has kLocal/kNoRole); confirm.
        cout << "SeekHelp() function called" << endl;
        return EXIT_SUCCESS;
    }
  }

  // Everything after the parsed options is positional: index names and/or an
  // output index prefix, interpreted according to the selected mode below.
  char** input_files = argv + optind;
  int num_input_files = argc - optind;

  switch (command_line_args.role){
    case CommandLineArgs::kLocal:
      // Bind the positional arguments to the fields each mode expects.
      // Extra positional arguments beyond those listed are silently ignored.
      switch (command_line_args.mode) {
        // These take an index name as the argument.
        case CommandLineArgs::kCat:
        case CommandLineArgs::kLoopOverIndexData:
        case CommandLineArgs::kQuery:
        case CommandLineArgs::kRetrieveIndexData:
          for (int i = 0; i < num_input_files; ++i) {
            switch (i) {
              case 0:
                command_line_args.index_files1 = ParseIndexName(input_files[i]);
                break;
            }
          }
          break;

        // These take an index name to operate on and an output index name as the arguments.
        case CommandLineArgs::kLayerify:
        case CommandLineArgs::kPrune:
        case CommandLineArgs::kRemap:
          for (int i = 0; i < num_input_files; ++i) {
            switch (i) {
              case 0:
                command_line_args.index_files1 = ParseIndexName(input_files[i]);
                break;
              case 1:
                command_line_args.output_index_prefix = input_files[i];
                break;
            }
          }
          break;

        // These take two index names as the arguments.
        case CommandLineArgs::kDiff:
          for (int i = 0; i < num_input_files; ++i) {
            switch (i) {
              case 0:
                command_line_args.index_files1 = ParseIndexName(input_files[i]);
                break;
              case 1:
                command_line_args.index_files2 = ParseIndexName(input_files[i]);
                break;
            }
          }
          break;

        // These don't take any arguments.
        case CommandLineArgs::kIndex:
        case CommandLineArgs::kMergeInitial:
        case CommandLineArgs::kMergeInput:
        case CommandLineArgs::kNoIdea:
          break;
      }

      // Install signal handling (mode-dependent) and seed the RNG before
      // dispatching to the selected operation.
      Init();
      srand(time(NULL));

      switch (command_line_args.mode) {
        case CommandLineArgs::kIndex:
          //begin the indexing pipeline.
          Index();
          break;
        case CommandLineArgs::kQuery:
          Query();
          break;
        case CommandLineArgs::kMergeInitial:
          MergeInitial();
          break;
        case CommandLineArgs::kMergeInput:
          // NOTE(review): placeholder — prints instead of running MergeInput().
          cout << "MergeInput() function called." << endl;
          break;
        case CommandLineArgs::kRemap:
          // NOTE(review): placeholder — prints instead of running Remap().
          cout << "Remap() function called." << endl;
          break;
        case CommandLineArgs::kLayerify:
          Layerify();
          break;
        case CommandLineArgs::kPrune:
          Prune();
          break;
        case CommandLineArgs::kCat:
          Cat();
          break;
        case CommandLineArgs::kDiff:
          Diff();
          break;
        case CommandLineArgs::kRetrieveIndexData:
          cout << "RetrieveIndexData() function called." << endl;
          break;
        case CommandLineArgs::kLoopOverIndexData:
          cout << "LoopOverIndexData() function called." << endl;
          break;
        default:
          cout << "help() function called." << endl;
          break;
      }
      break;
    case CommandLineArgs::kNoRole:
      // No role selected on the command line (no -l/--local): show usage.
      Help();
      break;
  }
  return EXIT_SUCCESS;
}
