//==============================================================================================================================================================
// Author(s): Roman Khmelichek, Wei Jiang
//==============================================================================================================================================================

#include "polyIRToolkit_Wei.h"

#include <algorithm>
#include <boost/algorithm/string.hpp>
#include <boost/version.hpp>
#include <fstream>
#include <iostream>

// across all servers
// #include <python2.7/Python.h>
// vidaserver1
// #include <python2.6/Python.h>

#include <sstream>
#include <time.h>

using namespace std;

// Holds every option that can be supplied on the command line. A single
// file-static instance (command_line_args, defined below) is populated during
// argument parsing and consulted by the mode entry points (Query(),
// MergeInitial(), ...).
struct CommandLineArgs {

  // Initializes every option to its "not specified" default.
  CommandLineArgs() :
    role(kNoRole),
    mode(kNoIdea),
    merge_degree(0),
    output_index_prefix(NULL),
    term(NULL),
    term_len(0),
    in_memory_index(false),
    memory_mapped_index(false),
    use_external_index(false),
    doc_mapping_file(NULL),
    query_stop_words_list_file(NULL),

    // Already implemented in the command line.
    // original version
    query_algorithm_local(LocalQueryProcessor::kDefault),
    query_mode_local(LocalQueryProcessor::kInteractive),
    result_format_local(LocalQueryProcessor::kNormal){
  }

  ~CommandLineArgs() {
  }

  // Major operating mode selected on the command line; kNoIdea means none chosen.
  enum Mode {
    kIndex, kMergeInitial, kMergeInput, kQuery, kRemap, kLayerify, kCat, kDiff, kRetrieveIndexData, kLoopOverIndexData, kNoIdea, kConvert
  };

  // Where the work runs; kNoRole means no role was chosen.
  enum Role{
    kLocal, kNoRole
  };

  IndexFiles index_files1;  // Primary index file set.
  IndexFiles index_files2;  // Secondary index file set (presumably for two-index modes such as --diff; TODO confirm).

  Role role;
  Mode mode;


  int merge_degree;  // Indices merged at once; values <= 0 fall back to the default (see MergeInitial()).

  const char* output_index_prefix;  // Not owned; points into argv or is NULL.

  const char* term;  // Single-term argument; not owned.
  int term_len;      // Length of 'term' in bytes.

  bool in_memory_index;
  bool memory_mapped_index;

  bool use_external_index;

  const char* doc_mapping_file;  // Not owned; NULL when no mapping file was given.

  const char* query_stop_words_list_file;  // Not owned; NULL when no stop-word list was given.

  // Already associated with the command line.
  LocalQueryProcessor::QueryAlgorithm query_algorithm_local;
  LocalQueryProcessor::QueryMode query_mode_local;
  LocalQueryProcessor::ResultFormat result_format_local;
};

// Single file-static holder of the parsed command-line state; written during
// argument parsing and read by the mode entry points below.
static CommandLineArgs command_line_args;

// Name of the file that records the doc-id ranges of the document collections.
static const char document_collections_doc_id_ranges_filename[] = "document_collections_doc_id_ranges";

// Overrides the options set in the configuration file or adds new options to the configuration as specified on the command line.
// Syntax for 'options': key1=value1;key2=value2;
// Note that each key/value pair must end with a semicolon, except the last pair, which is optional for convenience.
// When entering on the command line, the semicolon char ';' is considered a special character by the shell and so
// must be escaped by prepending a '\' character in front.
// Example:
// $ ./irtk --index --config-options=document_collection_format=trec\;include_positions=false\;new_option=1
void OverrideConfigurationOptions(const string& options) {
  cout << "Overriding the following configuration file options: " << endl;

  size_t option_start = 0;
  size_t option_end = 0;
  size_t last_option_start = 0;

  while ((option_end = options.find(';', option_start)) != string::npos) {
    string key_value = options.substr(option_start, (option_end - option_start));
    ++option_end;
    option_start = option_end;
    last_option_start = option_start;
    SetConfigurationOption(key_value);
  }

  // The only option specified or the last option specified didn't end with a semicolon.
  if (option_start == 0 || option_start != options.size()) {
    string key_value = options.substr(last_option_start);
    SetConfigurationOption(key_value);
  }
}



// Displays common usage information. For more details, the project wiki should be consulted.
void Help() {
  std::cout << std::endl
            << "* Version Info *\n"
            << "Updated on 2014/03/13 night by Wei Jiang at school" << std::endl
            << std::endl
            << "* Quick Start Guide *\n"
            << "index usage: 'irtk --local --index'\n"
            << "  expects a list of paths to document bundles from stdin\n"
            << "\n"
            << "merge usage: 'irtk --local --merge'\n"
            << "  merges the initial indices generated by the indexing process\n"
            << "\n"
            << "query usage: 'irtk --local --query'\n"
            << "  queries the final index generated by the merging process\n"
            << "\n"
            << "* Additional Notes *\n"
            << "Please see the current latest WIKI page at 'http://code.google.com/p/poly-ir-toolkit-wei-june/w/list' for more detailed usage information." << std::endl
            << "For problems and feedbacks, please contact 'Wei Jiang' using wj382@nyu.edu" << std::endl
            << "\n";
}

// Loads "<term> <probability>" pairs (one whitespace-separated pair per line)
// from the file named by the kQueryTermProbablityDistributionFileName
// configuration option into 'queryTermsProbabilityDistributionMap'.
// A missing or empty file is tolerated and merely logged.
void LoadUpQueryTermsProbabilityDistribution(map<string,float> &queryTermsProbabilityDistributionMap) {
	string inputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermProbablityDistributionFileName));
	ifstream inputfile(inputFileName.c_str());
	string currentLine;

	// Loop on getline() itself; the previous "while (inputfile.good())" form
	// tests the stream *before* reading and can act on a stale buffer at EOF.
	while (getline(inputfile, currentLine)) {
		if (currentLine != "") {
			boost::algorithm::trim(currentLine);

			istringstream iss(currentLine);
			string term;
			string probability;
			iss >> term >> probability;

			queryTermsProbabilityDistributionMap[term] = atof(probability.c_str());
		}
	}

	inputfile.close();

	if (queryTermsProbabilityDistributionMap.size() == 0) {
		GetDefaultLogger().Log("Load Query Term Probability Distribution by Our Own Method--- Take Care", false);
	}
	else {
		GetDefaultLogger().Log(Stringify(queryTermsProbabilityDistributionMap.size()) + " pairs have been loaded.", false);
	}
}

void Query() {
  if (command_line_args.role == CommandLineArgs::kLocal){
      GetDefaultLogger().Log("Starting query processor with index '" + command_line_args.index_files1.prefix() + "'.", false);

      // index path options:
      // for moa:
      string indexPath0 = "/home/diaosi/workspace/web-search-engine-wei-2014-April/data/Clueweb09_indexes";
      string indexPath1 = "/home/diaosi/workspace/web-search-engine-wei-2014-April/data/Gov2_indexes/prunedGov2IndexBasedOn_PartialBM25/";
      // for pangolin:
      string indexPath4 = "/data/rkhmel01/ir_toolkit";
      string indexPath6 = "/data/obukai/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
      // for dodo:
      string indexPath7 = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_Machine_Learned";
      string indexPath8 = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25";
      string indexPath9 = "/home/diaosi/outputDirForIndexes/originalGov2Index/";
      string indexPath12 = "/home/diaosi/web-search-engine-wei/polyIRIndexer/";
      // for vidaserver1
      // local scrach
      string indexPath13 = "/local_scratch/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/";
      string indexPath14 = "/local_scratch/wei/workspace/NYU_IRTK/data/outputDirForIndexes/";
      string indexPath16 = "/local_scratch/wei/workspace/NYU_IRTK/data/Gov2_indexes/";
      // on NFS
      string indexPath15 = "/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/";
      string indexPath17 = "/home/vgc/wei/workspace/NYU_IRTK/data/GOV2/Gov2_indexes/";
      string indexPath18 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHit_30%_index/LEAVE_index_combined/";
      string indexPath19 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/backupIndex/";
      string indexPath20 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/indexInDevelopment/";
      string indexPath21 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/1PercentageIndex_20140919/";
      string indexPath22 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/70PercentageIndex_20140919/";
      string indexPath23 = indexPath18 + Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTonsOfIndexPath));
      string indexPath24 = "/home/vgc/wei/workspace/NYU_IRTK/data/outputDirForIndexes" + Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTonsOfIndexPath));
      string indexPath25 = "/san_data/research/wei/workspace/NYU_IRTK/data/outputDirForIndexes" + Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTonsOfIndexPath));
      string indexPath26 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/90PercentageIndex_UPP-5_ptPowTo1_20140919";
      string indexPath27 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/100%_gov2_index_for_tiering";
      string indexPath28 = "/home/vgc/wei/workspace/NYU_IRTK/data/outputDirForIndexes/";
      string indexPath29 = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTempIndexPath));
      string indexPath30 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/90PercentageIndex_UPP-5_ptPowTo1_20140919";
      // combination1
      // string indexPath31 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/10%_docHitsIndex/combineIndex/";
      // string indexPath32 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/1PercentageIndex_UPP-5_ptPowTo1_20140919/";
      // combination2
      string indexPath33 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/5%_docHitsIndex/combineIndex/";
      string indexPath34 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/5PercentageIndex_UPP-5_ptPowTo1_20140919";
      string indexPath35 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/index_postingHit_point2MQueries_20141107/";
      string indexPath36 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/index_postingHit_10MQueries_20141107/";
      string indexPath37 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/index_postingHit_50MQueries_20141107/";
      string indexPath38 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHit_1%_index/LEAVE_index_combined/";
      string indexPath39 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/normalizedPostingHit_1%_index/index_0M_1M/";
      string indexPath40 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHit_1%_index/LEAVE_index_combined/";
      string indexPath41 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/normalizedPostingHit_1%_index/LEAVE_index_combined/";
      string indexPath42 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/90PercentageIndex_UPP-5_ptPowTo1_20140919/";
      string indexPath43 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/normalizedPostingHit_2%_index/LEAVE_index_combined/";
      string indexPath44 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHit_5%_index/LEAVE_index_combined/";
      string indexPath45 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/normalizedPostingHit_5%_index/LEAVE_index_combined/";
      string indexPath46 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHit_10%_index/LEAVE_index_combined/";
      string indexPath47 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/docHitsDividedByXdoc_2%";
      string indexPath48 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/docHitsDividedByXdoc_5%";
      string indexPath49 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/1PercentageIndex_UPP-5_ptPowTo1_20140919";
      string indexPath50 = "/san_data/research/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/2PercentageIndex_UPP-5_ptPowTo1_20140919";

      string indexPath51 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/1%/all";
      string indexPath52 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/1%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath53 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/5%/all";
      string indexPath54 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/5%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath55 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/10%/all";
      string indexPath56 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/10%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath57 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/20%/all";
      string indexPath58 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/20%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath59 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/30%/all";
      string indexPath60 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/30%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath61 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/40%/all";
      string indexPath62 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/40%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath63 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/50%/all";
      string indexPath64 = "/san_data/research/wei/workspace/NYU_IRTK/data/GOV2/50%_docHitsDividedBySizeIndex_from_50M_fakeQueries";

      string indexPath65 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/0M_1M_statistics_30%_h_10/5%/all";
      string indexPath66 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/0M_1M_statistics_35%_h_10/5%/all";
      string indexPath67 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/0M_1M_statistics_40%_h_10/5%/all";

      string indexPath68 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/0M_1M_statistics_70%_h_10/5%/all";
      string indexPath69 = "/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/0M_1M_statistics_5%_h_10_top40%_doc/10%/all";
      string indexPath70 = "/media/weijiang/67F904722691B07C/workspace/NYU_IRTK/data/GOV2/Gov2_indexes";
      string indexPath71 = "/home/weijiang/workspace/NYU_IRTK/polyIRToolkit_Wei";
      string indexPath72 = "/home/weijiang/workspace/ORIGINAL_POLY_IRTK/";

      GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath72, false);
      command_line_args.index_files1.SetDirectory(indexPath72);

      LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
                                     command_line_args.query_mode_local, command_line_args.result_format_local);
  }
}

// Lazily constructs the process-wide CollectionIndexer on first call
// (function-local static) and returns the same instance thereafter.
CollectionIndexer& GetCollectionIndexer() {
  static CollectionIndexer the_collection_indexer;
  return the_collection_indexer;
}

// This performs the merge for the complete index starting from the initial 0.0 indices.
void MergeInitial() {
  // Count the initial index files in the current working directory.
  DIR* directory = opendir(".");
  if (directory == NULL) {
    GetErrorLogger().LogErrno("opendir() in MergeInitial(), could not open directory to access files to merge", errno, true);
    return;
  }

  const char kInitialIndexPrefix[] = "index.idx.0";  // Just checks for the presence of the index files.
  int num_initial_indices = 0;
  for (struct dirent* entry = readdir(directory); entry != NULL; entry = readdir(directory)) {
    if (strncmp(entry->d_name, kInitialIndexPrefix, sizeof(kInitialIndexPrefix) - 1) == 0) {
      ++num_initial_indices;
    }
  }
  closedir(directory);

  const int kDefaultMergeDegree = 64;
  const bool kDeleteMergedFiles = Configuration::GetResultValue(Configuration::GetConfiguration().GetBooleanValue(config_properties::kDeleteMergedFiles));
  // NOTE(review): the merger presumably performs the merge inside its
  // constructor, since the object is never used afterwards — confirm.
  CollectionMerger merger(num_initial_indices, (command_line_args.merge_degree <= 0 ? kDefaultMergeDegree : command_line_args.merge_degree), kDeleteMergedFiles);
}

// Loads every non-empty, whitespace-trimmed line of the file named by the
// kQueryIDTermDocIDPairInputFile configuration option into 'TermDocIDPairList'.
// The internal format of each line is opaque to this function.
void LoadUpQueryTermDocIDPairList(vector<string> &TermDocIDPairList){
	string inputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryIDTermDocIDPairInputFile));
	ifstream inputfile(inputFileName.c_str());
	string currentLine;

	// Loop on getline() itself rather than "while (inputfile.good())", which
	// tests the stream before reading. (An unused 'priorityListElements'
	// vector was also removed.)
	while (getline(inputfile, currentLine)) {
		if (currentLine != "") {
			boost::algorithm::trim(currentLine);
			TermDocIDPairList.push_back(currentLine);
		}
	}
	inputfile.close();

	if (TermDocIDPairList.size() == 0) {
		GetDefaultLogger().Log("Load query term list is NOT Done", true);
	}
	else {
		GetDefaultLogger().Log(Stringify(TermDocIDPairList.size()) + " pairs have been loaded.", false);
	}
}

// Builds docLookUpDict[docID] = "<WARC-TREC-ID>_<dataFileName>_<compressedFileID>_<beginPos>_<endPos>"
// from the per-folder position files listed (one file name per line) in the
// aux file named by kAuxDocSpecifcPositionFolderFileList; each data file lives
// under the kDocSpecifcPositionFolderPath base path.
void LoadUpDocSpecifcPositionsPairs(map<string, string> &docLookUpDict) {
    cout << "Load Up Doc Specifc Positions Pairs()" << endl;
    string auxInputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kAuxDocSpecifcPositionFolderFileList));
    string basePath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kDocSpecifcPositionFolderPath));

    // First pass: collect the data file names from the aux list file.
    // (The previous version opened this file twice -- once via an
    // auxInputFile1Handler that was never read -- and declared an unused
    // inputFile1Handler; both have been removed.)
    vector<string> fileNames;
    {
        ifstream auxFile(auxInputFileName.c_str());
        string currentLine;
        while (getline(auxFile, currentLine)) {
            if (currentLine != "") {
                boost::algorithm::trim(currentLine);
                fileNames.push_back(currentLine);
            }
        }
    }

    // Second pass: each data file holds rows of
    // "<docID> <WARC-TREC-ID> <compressedFileID> <beginPos> <endPos> ...".
    for (vector<string>::iterator fileNamesIterator = fileNames.begin(); fileNamesIterator != fileNames.end(); ++fileNamesIterator) {
        string dataInputFileName = basePath + *fileNamesIterator;
        ifstream dataFile(dataInputFileName.c_str());
        string currentLine;
        while (getline(dataFile, currentLine)) {
            if (currentLine == "") {
                continue;
            }
            boost::algorithm::trim(currentLine);
            istringstream iss(currentLine);
            string docID;
            string WARCTRECID;
            string compressedFileID;
            string beginningPosition;
            string endingPosition;
            iss >> docID >> WARCTRECID >> compressedFileID >> beginningPosition >> endingPosition;
            // Any trailing tokens are ignored (the original read a sixth field
            // only to overwrite it immediately).
            docLookUpDict[docID] = WARCTRECID + "_" + *fileNamesIterator + "_" + compressedFileID + "_" + beginningPosition + "_" + endingPosition;
        }
        dataFile.close();
    }
    cout << "Length of the docLookUpDict:" << docLookUpDict.size() << endl;
}





// Builds fileLookUpDict["<listFileName>_<fileID>"] = <compressed file path>
// from the ID-mapping files listed (one file name per line) in the aux file
// named by kAuxCompressedFileIDFileList; each mapping file lives under the
// kCompressedFileIDFilePath base path and holds "<fileID> <filePath>" rows.
void LoadUpCompressedFileIDs(map<string, string> &fileLookUpDict) {
    cout << "Load Up Compressed File IDs" << endl;
    string auxInputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kAuxCompressedFileIDFileList));
    string basePath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kCompressedFileIDFilePath));

    // First pass: collect the mapping file names from the aux list file.
    // (The previous version opened this file twice -- once via an
    // auxInputFile1Handler that was never read -- and declared an unused
    // inputFile1Handler; both have been removed, as has a counter that was
    // only used by commented-out debug output.)
    vector<string> fileNames;
    {
        ifstream auxFile(auxInputFileName.c_str());
        string currentLine;
        while (getline(auxFile, currentLine)) {
            if (currentLine != "") {
                boost::algorithm::trim(currentLine);
                fileNames.push_back(currentLine);
            }
        }
    }

    // Second pass: read "<fileID> <filePath>" rows from each mapping file.
    for (vector<string>::iterator fileNamesIterator = fileNames.begin(); fileNamesIterator != fileNames.end(); ++fileNamesIterator) {
        string dataInputFileName = basePath + *fileNamesIterator;
        ifstream dataFile(dataInputFileName.c_str());
        string currentLine;
        while (getline(dataFile, currentLine)) {
            if (currentLine == "") {
                continue;
            }
            boost::algorithm::trim(currentLine);
            istringstream iss(currentLine);
            string fileID;
            string filePath;
            iss >> fileID >> filePath;
            fileLookUpDict[*fileNamesIterator + "_" + fileID] = filePath;
        }
        dataFile.close();
    }
    cout << "Length of the fileLookUpDict:" << fileLookUpDict.size() << endl;
}

// Stub: the term-loading logic was disabled; abort loudly so callers notice.
void LoadUpTerms(vector<string> &terms) {
    cout << "Load Up Terms updated on 20130805 night by Wei" << endl;
    cout << "Logic empty" << endl;
    exit(1);
    // The disabled implementation read the file named by
    // config_properties::kQueryTermsONLYInputfileName and appended each
    // non-empty, trimmed line to 'terms'.
}

// Loads each non-empty, whitespace-trimmed line of 'inputFileName' as a term,
// mapping it to 1 (the map is effectively used as a set -- TODO confirm with callers).
void LoadUpTerms(map<string,int> &terms, string inputFileName) {
    cout << "Load Up Terms updated on 20130911 morning by Wei at school." << endl;
    cout << "inputFileName: " << inputFileName << endl;
	ifstream inputfile(inputFileName.c_str());
	string currentLine;

	// Loop on getline() itself; the previous "while (inputfile.good())" form
	// tests the stream before reading and can act on a stale buffer at EOF.
	while (getline(inputfile, currentLine)) {
		if (currentLine != "") {
			boost::algorithm::trim(currentLine);
			terms[currentLine] = 1;
		}
	}
	cout << "The size of the map<string,int> terms:" << terms.size() << endl;
	inputfile.close();
}



void LoadUpLexiconTermsWei20130213(vector<string> &queryTerms) {
    cout << "Load Up Lexicon Terms updated on 20130213" << endl;

    string inputFileName = "/home/diaosi/gov2ClearYourMindAndDoItAgain/lexiconTermsONLY.txt";
    cout << "inputFileName:" << inputFileName << endl;
    string currentLine;
	ifstream inputfile(inputFileName.c_str());

	while ( inputfile.good() )
	{
		getline (inputfile,currentLine);
		if(currentLine != ""){
		    boost::algorithm::trim(currentLine);
		    queryTerms.push_back(currentLine);
		}
	}
	cout << "The length of the queryTerms is:" << queryTerms.size() << endl;
	inputfile.close();
}

// Converts 'originalValue' to its default iostream text form (identical to
// what "cout << originalValue" prints). An output-only ostringstream replaces
// the original bidirectional stringstream and redundant temporary.
std::string make_the_value_into_string_format(float originalValue){
	  std::ostringstream formatted;
	  formatted << originalValue;
	  return formatted.str();
}

// Converts 'originalValue' to its decimal text form (identical to what
// "cout << originalValue" prints). An output-only ostringstream replaces the
// original bidirectional stringstream and redundant temporary.
std::string make_the_value_into_string_format(int originalValue){
	  std::ostringstream formatted;
	  formatted << originalValue;
	  return formatted.str();
}

void Cat() {
  string flagString = "";
  cout << "value 1: original toolkit function for supporting the commands --cat and --cat-term" << endl;
  cout << "value 2: (STOP USING THIS) ask for one docID as input, retrieve back the whole document and display on the screen effectively.(How to make use of the original web page is the next step. Updated 2012/06/27 by Wei)" << endl;
  cout << "value 3: (STOP USING THIS) this is currently supported for the machine learning research project for positions related features(comment on 2013/01/11: OLD)." << endl;
  cout << "value 4: (STOP USING THIS, USE OPTION 10 instead) ask for multiple query terms as input until BLANK encountered, retrieve back the corresponding inverted lists and apply to the build-in pruning policies(comment on 2013/01/11: maybe useful)." << endl;
  cout << "Value 5: [Utility]output the lexicon term with the length of the list.(updated by Wei on 2014/09/20" << endl;
  cout << "value 6: update the external index given the new score intermediate file( comment on 2013/01/11: whether we need this method is in doubt now)." << endl;
  cout << "value 7: (STOP USING THIS) for the Learning To Prune research project. Updated 2012/09/19 by Wei" << endl;
  cout << "value 8: (STOP USING THIS) the input will be some query terms, output the inverted list of those terms. Updated 2012/11/26 by Wei" << endl;
  cout << "value 9: the input will be a set of indexes, the output will be an aux file that contains two rows, first is the lexicon term, second the freq in the collection. Updated 2012/11/28 by Wei" << endl;
  cout << "value 10: apply ML technique and store the probability back into the external inverted external index. Updated by Wei 20130126" << endl;
  cout << "value 11: run a small python program in C++" << endl;
  cout << "value 12: (STOP USING THIS)output external sorting files for python external sorting. 2013/02/12" << endl;
  cout << "value 13: (STOP USING THIS, cause we already have another random sample method)build a score/probability histogram in main memory and output it to disk. Updated by Wei 2013/08/04" << endl;
  cout << "value 14: Universal cut applying to the original index. Updated by Wei 2013/02/16" << endl;
  cout << "value 15: (test playground)Load the query term probability distribution. Updated by Wei 2013/02/23" << endl;
  cout << "value 16: Updated by Wei 2014/07/18. Given a set of queryTerms, sort the inverted index of each of them, and output the threshold of corresponding percentage cut for the pruning method TCP." << endl;
  cout << "value 17: (STOP USING THIS)build the query view(QV), and put the counter info into the external index. Updated by Wei 2013/02/28" << endl;
  cout << "value 18: run a small program using the 'COMBINATION LIBRARY' developped at poly in C++" << endl;
  cout << "value 19: This is an overall phase2 information generation task.(Updated by Wei on 2013/09/26 morning)" << endl;
  cout << "value 20: (STOP USING THIS) (Prof said there is NO BIG DEAL)The invertion operation of the inverted index." << endl;
  cout << "value 21: This is task for getting the feature: posting_rank_in_the_doc(Updated by Wei 20130714)" << endl;
  cout << "value 22: This is task for generating the Xdoc value for each document in the collection(eg. gov2 dataset)" << endl;
  cout << "value 23: [Utility]This is task for outputting the <docID, trecID, size of the document in words> for gov2/clueweb09B dataset(Updated by Wei 2014/09/07)" << endl;
  cout << "value 24: [Utility]This is task for outputting the impact scores for the randomly selected postings of both datasets(Updated by Wei 2014/06/15 at school)" << endl;
  cout << "value 25: [Utility]This is task for outputting the <docID, # of postings> for gov2/clueweb09B dataset (Updated by Wei 2014/06/07)" << endl;
  cout << "value 26: [Utility]This is task for: Whether the newly generated 64BIT index works OR NOT (Updated by Wei 2014/06/14 at school)" << endl;
  cout << "value 27: [Utility]Compute the posting rank in list and store the posting rank in list in external index(memory mapped). Updated by Wei on 20140609 at school" << endl;
  cout << "value 28: Build a forward index including the value: posting_rank_in_list(currently in test). Updated by Wei on 20130910 night at school" << endl;
  cout << "value 29: Prototyping the posting oriented uniform pruning method. Updated by Wei on 2013/09/12 night at school." << endl;
  cout << "value 30: A small example of showing how much memory it will take to load the longest list info into main memory. Updated by Wei on 2013/09/20 at school." << endl;
  cout << "value 31: [DECREPIT because we do NOT need this analysis any more SINCE 2013/10/22 morning]output the set of documents needed to be parsed given a set of randomly selected postings. For the analysis of the distribution of the maximum potential usefulness. Updated by Wei on 2013/09/25 night at school." << endl;
  cout << "value 32: Documents picking analysis. Whether this alg. is useful or NOT has NOT been tested yet. So, let's test it. Updated on 2013/11/07 night from dodo" << endl;
  cout << "value 33: Document analysis for the parsing performance(OLD and SLOW version) Updated by Wei on 20131105 night at school." << endl;
  cout << "value 34: Pre Document Analysis Stage | Wei | 20140608 | school | IN USE" << endl;
  cout << "value 35: Simple Popping Stage | Wei | 20140608 | school | IN USE" << endl;
  cout << "value 36: Simple example to show that the quantization code from prof actually works. Updated by Wei on 2014/02/19 night at school[STOP this direction currently since 2014/03/02 night]." << endl;
  cout << "value 37: compute the posting rank in list for the clueweb09B dataset. Updated by Wei on 2014/05/15 night at school" << endl;
  cout << "value 38: output the partial impact bm25 score for the gov2 dataset. Updated by Wei on 2014/06/03 night at school" << endl;
  cout << "value 39: output the simple document posting array for clueweb09B dataset" << endl;
  cout << "value 40: traverse the term in lexicon and retrieve the rank in list." << endl;
  cout << "value 41: Updated on 20140906. input: a given index in the pruning output file format. Output: an index that can be queried by the toolkit" << endl;
  cout << "value 42: Updated on 20141003. A procedure to produce the term upper score of a list for easy WAND and MaxScore implementation." << endl;
  cout << "value 43: transfer an OLD 32Bit lexicon to a NEW 64Bit lexicon" << endl;
  cout << "value 44: Let's collect the statistics for the denominator table" << endl;
  cout << "Enter the function value[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44]:";
  getline(cin, flagString);
  boost::algorithm::trim(flagString);

  if (flagString == "1"){
	  IndexCat index_cat(command_line_args.index_files1);
	  index_cat.Cat(command_line_args.term, command_line_args.term_len);
  }
  else if (flagString == "2"){

	  command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/clueweb");

	  // Init some variables to support this operation.
	  map<string, string> docLookUpDict;
	  LoadUpDocSpecifcPositionsPairs(docLookUpDict);

	  map<string, string> fileLookUpDict;
	  LoadUpCompressedFileIDs(fileLookUpDict);

	  IndexCat index_cat(command_line_args.index_files1);
	  string previousCompressedFilePath = "";

	  while (true){
		  cout << "Enter docID[0,1022253]:";
		  string docIDSearchFor = "";
		  getline(cin, docIDSearchFor);
		  boost::algorithm::trim(docIDSearchFor);

		  int value = -1;

		  try
		  {
			  value = atoi(docIDSearchFor.c_str());
			  if ( value >= 0 && value <= 1022253){
				  index_cat.Cat(docIDSearchFor, docLookUpDict, fileLookUpDict,previousCompressedFilePath);
			  }
			  else{
				  cout << "The docID you entered is out of range. Please try again." << endl;
			  }
		  }
		  catch(int e)
		  {
			   cout << "The string you entered is NOT an legal number." << endl;
		  }
	  }
  }
  else if (flagString == "3"){
	  string outputFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kQueryTermDocIDPairPositionsOutputFile));

	  ofstream outputFileHandler(outputFileName.c_str());

	  vector<string> queryID_Term_docIDList;
	  LoadUpQueryTermDocIDPairList(queryID_Term_docIDList);

	  map<string, string> docLookUpDict;
	  LoadUpDocSpecifcPositionsPairs(docLookUpDict);

	  map<string, string> fileLookUpDict;
	  LoadUpCompressedFileIDs(fileLookUpDict);

	  string previousCompressedFilePath = "";


	  int counter = 0;
	  int numberOfDocNotFound = 0;

	  //init for the currentDocID,begin.
	  istringstream iss(queryID_Term_docIDList[0]);
      string queryNumber;
      string queryTerm;
      string docID = "";

	  iss >> queryNumber;
	  iss >> queryTerm;
	  iss >> docID;
	  string currentDocID = docID;
	  //init for the currentDocID,end.

	  vector<string> termNumberList;
	  for( unsigned int i = 0; i < queryID_Term_docIDList.size(); i++ ) {
		  //let's divide the term list based on docID.
		  istringstream iss(queryID_Term_docIDList[i]);
	      string queryNumber;
	      string queryTerm;
	      string docID = "";

		  iss >> queryNumber;
		  iss >> queryTerm;
		  iss >> docID;
		  if(currentDocID != docID){
			  //cat the things and init the whole things.
			  IndexCat index_cat(command_line_args.index_files1);
			  GetDefaultLogger().Log("Processing " + Stringify(termNumberList.size() ) + " Term, Query number pair in docID:" +  currentDocID, false);
			  //cout << "In:" << currentDocID << endl;
			  index_cat.Cat(flagString, outputFileHandler, termNumberList, currentDocID, docLookUpDict, fileLookUpDict,previousCompressedFilePath,numberOfDocNotFound);
			  //cout << "Out:" << currentDocID << endl;
			  currentDocID = "";
			  termNumberList.clear();

			  currentDocID = docID;
			  termNumberList.push_back(queryNumber + "_" + queryTerm);
		  }
		  else{
			  termNumberList.push_back(queryNumber + "_" + queryTerm);
		  }
		  counter ++;
		  cout << endl;
	  }
	  //wrap up. Process the final step.
	  IndexCat index_cat(command_line_args.index_files1);
	  GetDefaultLogger().Log("Processing " + Stringify(termNumberList.size() ) + " Term, Query number pair in docID:" +  currentDocID, false);
	  index_cat.Cat(flagString, outputFileHandler, termNumberList, currentDocID, docLookUpDict, fileLookUpDict,previousCompressedFilePath,numberOfDocNotFound);
	  currentDocID = "";
	  termNumberList.clear();

	  GetDefaultLogger().Log(Stringify(counter) + " pairs have been processed.", false);
	  GetDefaultLogger().Log(Stringify(numberOfDocNotFound) + " docs are not found.", false);

	  outputFileHandler.close();

  }
  else if (flagString == "4"){
	  cout << "Updated by wei 2013/02/23" << endl;
	  cout << "This function has been very OLD and please do NOT use it anymore." << endl;
	  cout << "The logic has been completely commented out" << endl;
	  /*
	  string flagString2 = "";
	  cout << "value 1: Run build-in example with query terms provided in a plain file." << endl;
	  cout << "Enter the function value[1]:";


	  //for user to enter:
	  //getline(cin, flagString2);
	  //for debug:
	  flagString2 = "1";
	  cout << "You entered:" << flagString2 << endl;

	  boost::algorithm::trim(flagString2);


	  vector<string> queryTerms;

	  if (flagString2 == "1"){
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // 362.53 seconds for 23 query terms(without scores stored).
		  // 439.084 seconds for 23 query terms(with scores stored).

		  // Load the complete query terms for evaluation, for production but for test.
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  //string queryTest0 = "snuff";
		  //string queryTest1 = "snyder";
		  //string queryTest2 = "so";
		  //string queryTest3 = "soalr";
		  //string queryTest4 = "soap";

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  string queryTerm0 = "00000000000000000000"; //70 postings.
		  //string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  //queryTerms.push_back(queryTest0);
		  //queryTerms.push_back(queryTest1);
		  //queryTerms.push_back(queryTest2);
		  //queryTerms.push_back(queryTest3);
		  //queryTerms.push_back(queryTest4);

		  queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  queryTerms.push_back(queryTerm2);
		  //queryTerms.push_back(queryTerm3);
		  //queryTerms.push_back(queryTerm4);
		  //queryTerms.push_back(queryTerm5);

	  }
	  else if (flagString2 == "2"){
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }
	  else{
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }

	  // Actual doing the pruning here.
	  for(unsigned int i = 0; i < queryTerms.size(); i++)
	  {
		  cout << "query term " << i << ":"<< queryTerms[i] << endl;
	  }

	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/11
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  IndexCat index_cat(command_line_args.index_files1);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "wei_pruning_development_" + string(buff) + "-100%";


	  // Lots of common between pruning and layering project. From now, we use the class LayeredIndexGenerator to do pruning as well.
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  //TODO: ??? the output meta index, some numbers are not that consistent with the original one.
	  // The 1st argument is the vector queryTerms which have all the query terms ready for pruning.
	  // The 2ed argument is the bool argument of the debugFlag. True for opening the debugging mode, false for closing the debugging mode.
	  // The 3rd argument is the bool argument of the switch for storing the computed scores into external index. True for yes and false for no.
	  cout << "store the external score into the index set to false" << endl;
	  layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, false, false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  */
  }
  else if (flagString == "5"){
	  // 1% index for the docHit
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // command_line_args.index_files1.SetDirectory("/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/1%_docHitsIndex/combineIndex/");
	  // 50% index for the docHit
	  // command_line_args.index_files1 = ParseIndexName("index:0.0");
	  // command_line_args.index_files1.SetDirectory("/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/50%_docHitsIndex/combineIndex/");
	  // 10% index for the UPP-5 method
	  // command_line_args.index_files1 = ParseIndexName("index:0.0");
	  // command_line_args.index_files1.SetDirectory("/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/tonsOfIndexesMaking_20140920/10PercentageIndex_UPP-5_ptPowTo1_20140919/");
	  command_line_args.index_files1 = ParseIndexName("index");
	  // command_line_args.index_files1.SetDirectory("/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/normalizedPostingHit_1%_index/LEAVE_index_combined/");
	  command_line_args.index_files1.SetDirectory("/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHit_1%_index/LEAVE_index_combined/");
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "wei_pruning_development_" + string(buff) + "-90%";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
	  layered_index_generator.OutputTermAndTermListLength();
  }
  else if (flagString == "6"){
	    cout << "step1: Update the target external index " << endl;
	    cout << "This is very interesting logic but currently have been blocked due to long history." << endl;
	    exit(1);
	    /*
	    command_line_args.index_files1 = ParseIndexName("completedQueryTermsForGOV2EfficiencyTaskWithWrongScoresStored4Part1");
	    command_line_args.index_files1.SetDirectory("/data5/team/weijiang/compatibleIndexesWithIRTK/gov2");
		GetDefaultLogger().Log("Starting local query processor with index '" + command_line_args.index_files1.prefix() + "'.", false);
		LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
				command_line_args.query_mode_local, command_line_args.result_format_local,"auto");
		query_processor.UpdateExternalIndexFromHardDrive();
		*/
  }
  else if (flagString == "7"){
	    cout << "NO logic here, STOP using this function" << endl;
	    /*
	    cout << "for the Learning To Prune research project. Output a training file which contains both the good and bad training examples for weka.(head -1000 queries, top100 results with their postings. The same amount as the complementary.)" << endl;
	    command_line_args.index_files1 = ParseIndexName("completedQueryTermsForGOV2EfficiencyTaskWithWrongScoresStored4Part1");
	    command_line_args.index_files1.SetDirectory("/data5/team/weijiang/compatibleIndexesWithIRTK/gov2");
		GetDefaultLogger().Log("Starting local query processor with index '" + command_line_args.index_files1.prefix() + "'.", false);
	    LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
	                                     command_line_args.query_mode_local, command_line_args.result_format_local);
	    */
  }
  else if (flagString == "8"){
	  cout << "NO logic here, STOP using this function" << endl;
	  /*
	  cout << "The input will be a set of 1 term queries, the output will be a set of files, for each line, will have the following format: qid:term (docID1,partialBM25Score1),(docID2,partialBM25Score2) ..." << endl;
	  command_line_args.index_files1 = ParseIndexName("index");
	  command_line_args.index_files1.SetDirectory("/data/rkhmel01/ir_toolkit");
	  LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
									 command_line_args.query_mode_local, command_line_args.result_format_local);
	  */
  }
  else if (flagString == "9"){
	  cout << "The input will be some set of compatible indexes, the output will be a file containing all the lexicons with its lexicon_freq_in_collection." << endl;
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // Lots of common between pruning and layering project. From now, we use the class LayeredIndexGenerator to do pruning as well.
	  // Here, I just borrow the function called CreatePrunedIndexAuxInfo()
	  // There is NO USE for this variable output_index_predix

	  // option1:
	  // const char* output_index_prefix = (command_line_args.output_index_prefix != NULL ? command_line_args.output_index_prefix : "index_layered");

	  // option2:
	  const string output_index_prefix = "wei_testing_NO_USE";
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1,output_index_prefix);
	  layered_index_generator.OutputTermAndTermListLength();
  }
  else if (flagString == "10"){
	  cout << "apply ML technique and store the probability into the inverted external index";
	  std::cout << "Storing Procedure Begins..." << std::endl;

	  // ****************************************************************************************
	  // the original pruning logic

	  string flagString2 = "";
	  cout << "value 1: Run build-in example with query terms provided in a plain file." << endl;
	  cout << "Enter the function value[1]:";


	  //for user to enter:
	  //getline(cin, flagString2);
	  //for debug:
	  flagString2 = "1";
	  cout << "You entered:" << flagString2 << endl;

	  boost::algorithm::trim(flagString2);

	  vector<string> queryTerms;
	  float percentageToKeepOfTheWholeIndex = 1.0;
	  int pruningMethodCodeOfTheWholeIndex = 5;


	  if (flagString2 == "1"){
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // 362.53 seconds for 23 query terms(without scores stored).
		  // 439.084 seconds for 23 query terms(with scores stored).

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // string queryTest0 = "snuff";
		  // string queryTest1 = "snyder";
		  // string queryTest2 = "so";
		  string queryTest3 = "soalr";
		  // string queryTest4 = "soap";

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  // string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";
		  // string queryTerm9 = "1031b"; // 27 postings 30
		  // string queryTerm10 = "10cs"; // 19 postings 30

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);
		  // queryTerms.push_back(queryTerm9);
		  // queryTerms.push_back(queryTerm10);

	  }
	  else if (flagString2 == "2"){
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }
	  else{
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }



	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/24
	  // OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

	  // NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  IndexCat index_cat(command_line_args.index_files1);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "wei_pruning_development_universal_cutting_" + string(buff) + "-" + make_the_value_into_string_format(percentageToKeepOfTheWholeIndex) + "-" + make_the_value_into_string_format(pruningMethodCodeOfTheWholeIndex);


	  // Lots of common between the pruning and layering project.
	  // From now, we use the class LayeredIndexGenerator to do pruning as well.
	  // Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  //TODO: ??? the output meta index, some numbers are not that consistent with the original one.
	  // The 1st argument is the vector queryTerms which have all the query terms ready for pruning.
	  // The 2ed argument is the bool argument of the debugFlag. True for opening the debugging mode, false for closing the debugging mode.
	  // The 3rd argument is the bool argument of the switch for storing the computed scores into external index. True for yes and false for no.
	  // No matter what, the external index will be generated.
	  // If the flag set to false, it will ONLY store (1)chunk max score and (2)block max score
	  // If the flag set to true, it will ALSO store (2) the aux info for each posting which is very useful in the context of pruning
	  // Updated by Wei 2013/01/26, I think I am almost there and just keep going.
	  // Notes: Let's try this today:2013/01/26
	  cout << "store the external score into the index set to be true" << endl;

	  // old version, prune based on each inverted list
	  // layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, false, true);

	  // current version, prune based on universal importance of this posting

	  map<string,float> queryTermsProbabilityDistributionMap;
	  LoadUpQueryTermsProbabilityDistribution(queryTermsProbabilityDistributionMap);

	  /*
	  for(map<string,float>::iterator queryTermsProbabilityDistributionMapIterator = queryTermsProbabilityDistributionMap.begin(); queryTermsProbabilityDistributionMapIterator != queryTermsProbabilityDistributionMap.end(); queryTermsProbabilityDistributionMapIterator++){
		  cout << queryTermsProbabilityDistributionMapIterator->first << " " << queryTermsProbabilityDistributionMapIterator->second << endl;
	  }
	  */


	  layered_index_generator.CreatePrunedIndexForMultipleTerms(queryTerms, true, true, percentageToKeepOfTheWholeIndex, pruningMethodCodeOfTheWholeIndex,queryTermsProbabilityDistributionMap);

	  // old and NOT flexible version, do NOT use anymore
	  // layered_index_generator.CreatePrunedIndexForMultipleTermsBasedOnUniversalScoreImportanceOLDAndNotUsed(queryTerms, false, true, percentageToKeepOfTheWholeIndex, pruningMethodCodeOfTheWholeIndex);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;

	  // ****************************************************************************************

	  std::cout << "Storing Procedure Ends." << std::endl;
  }
  else if (flagString == "11"){
	    cout << "Updated by Wei on 2014/06/15" << endl;
	    cout << "DO NOT use python in toolkit anymore" << endl;
	    exit(1);

	    /*
	    // Remember to issue the following command before calling your own python module.
	    // export PYTHONPATH=${PYTHONPATH}:./
	  	PyObject *pName, *pModule, *pFunc;
	    PyObject *pArgs, *pValue;

	    Py_Initialize();
	    // option1
	    // pName = PyString_FromString("os");

	    // option2 (NOT working)
	    // pName = PyString_FromString("/home/obukai/workspace/polyIRToolkit/polyIRIndexer/simplePythonPlayGround");

	    // option3
	    pName = PyString_FromString("pythonModuleForCallingFromC");

	    // Error checking of pName left out

	    pModule = PyImport_Import(pName);
	    Py_DECREF(pName);

		PyRun_SimpleString("from time import time,ctime\n"
						   "print 'Today is',ctime(time())\n");

	    if (pModule != NULL) {
	    	// test1: call the function pythonModuleForCallingFromC_test
	        pFunc = PyObject_GetAttrString(pModule, "pythonModuleForCallingFromC_hello");
	        // pFunc is a new reference

	        if (pFunc && PyCallable_Check(pFunc)) {
	            pArgs = PyTuple_New(0);
	            pValue = PyObject_CallObject(pFunc, pArgs);
	            Py_DECREF(pArgs);
	            if (pValue != NULL) {
	                Py_DECREF(pValue);
	            }
	            else {
	                Py_DECREF(pFunc);
	                Py_DECREF(pModule);
	                PyErr_Print();
	                fprintf(stderr,"Call failed\n");
	            }
	        }
	        else {
	            if (PyErr_Occurred())
	                PyErr_Print();
	            fprintf(stderr, "Cannot find the function");
	        }
	        Py_XDECREF(pFunc);

	        // test2: call the function pythonModuleForCallingFromC_loadTheAuxInfoIntoMemory
	        pFunc = PyObject_GetAttrString(pModule, "pythonModuleForCallingFromC_loadTheAuxInfoIntoMemory");
	        // pFunc is a new reference

	        if (pFunc && PyCallable_Check(pFunc)) {
	            pArgs = PyTuple_New(0);

	            pValue = PyObject_CallObject(pFunc, pArgs);
	            Py_DECREF(pArgs);
	            if (pValue != NULL) {
	                printf("Result of call: %ld\n", PyInt_AsLong(pValue));
	                Py_DECREF(pValue);
	            }
	            else {
	                Py_DECREF(pFunc);
	                Py_DECREF(pModule);
	                PyErr_Print();
	                fprintf(stderr,"Call failed\n");

	            }
	        }
	        else {
	            if (PyErr_Occurred())
	                PyErr_Print();
	            fprintf(stderr, "Cannot find the function");
	        }
	        Py_XDECREF(pFunc);


	        // final step: delete the module
	        Py_DECREF(pModule);
	    }
	    else {
	        PyErr_Print();
	        fprintf(stderr, "Failed to load the module");
	    }
	    Py_Finalize();
	    */
  }
  else if (flagString == "12"){
	  // Sub-mode: write per-term score files to disk so an external (python)
	  // sorting step can consume them.
	  cout << "output external sorting files for python external sorting" << endl;
	  cout << "Storing Procedure Begins..." << std::endl;

	  // ****************************************************************************************
	  // the original pruning logic

	  // Selects the scoring/sorting scheme used by
	  // CreateExternalScoreFileForEachQueryTerm; the meaning of code 4 is
	  // defined inside LayeredIndexGenerator — TODO confirm.
	  int sortingMethodCodeForTheTerm = 4;

	  string flagString2 = "";
	  cout << "value 1: Run build-in example with query terms provided in a plain file." << endl;
	  cout << "Enter the function value[1]:";


	  //for user to enter:
	  //getline(cin, flagString2);
	  //for debug:
	  // NOTE(review): interactive input is bypassed — the sub-mode is hard-coded to "1".
	  flagString2 = "1";
	  cout << "You entered:" << flagString2 << endl;

	  boost::algorithm::trim(flagString2);

	  vector<string> queryTerms;

	  if (flagString2 == "1"){
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // 362.53 seconds for 23 query terms(without scores stored).
		  // 439.084 seconds for 23 query terms(with scores stored).

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // Populate queryTerms from the lexicon (project helper defined elsewhere).
		  LoadUpLexiconTermsWei20130213(queryTerms);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  /*
		  string queryTest0 = "snuff";	// 3854 postings.
		  string queryTest1 = "snyder";	// 47802 postings.
		  string queryTest2 = "so";	// 3219389 postings.
		  string queryTest3 = "soalr";	// 10 postings.
		  string queryTest4 = "soap";	// 175292 postings.

		  string queryTest5 = "0";	// 8400333 postings.
		  string queryTest6 = "113";	// 599688 postings.
		  string queryTest7 = "2";	// 10966214 postings.
		  */



		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  // string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  /*
		  queryTerms.push_back(queryTest0);
		  queryTerms.push_back(queryTest1);
		  queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  queryTerms.push_back(queryTest4);

		  queryTerms.push_back(queryTest5);
		  queryTerms.push_back(queryTest6);
		  queryTerms.push_back(queryTest7);
		  */

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);

	  }
	  else if (flagString2 == "2"){
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }
	  else{
		  cout << "Not supported operation." << endl;
		  exit(0);
	  }

	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/24
	  // OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

	  // NEW 64bit version
	  // NOTE(review): index name and directory are hard-coded to one machine's layout.
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  IndexCat index_cat(command_line_args.index_files1);

	  // Timestamp used only to make the output prefix unique.
	  // "%Y-%m-%d-%H-%M-%S" formats to exactly 19 chars, so buff[20] leaves
	  // one byte for the terminator.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";
	  // Lots of common between the pruning and layering project.
	  // From now, we use the class LayeredIndexGenerator to do pruning as well.
	  // Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
	  Timer pruning_time;
	  layered_index_generator.CreateExternalScoreFileForEachQueryTerm(queryTerms, false, sortingMethodCodeForTheTerm);
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Storing Procedure Ends." << std::endl;
  }
  else if (flagString == "13"){
	  // Sub-mode 13: build an in-memory score/probability histogram over a
	  // sampled set of terms and write it to disk.
	  cout << "build a score/probability histogram in main memory and output it to disk" << endl;
	  cout << "(1) I only do the histogram based on a certain set of terms(sampling)(select some terms that can represent the whole lexicon). Currently, I just select some query terms from the head 95K training query log" << endl;
	  cout << "(2) I can use the in memory histogram to count things (Proposed by Prof)" << endl;
	  cout << "Building score/value Histogram Begins..." << std::endl;

	  // The meaning of different sortingMethodCodeForTheTerm
	  // (needed to be filled completely)
	  // sortingMethodCodeForTheTerm = 1
	  // sortingMethodCodeForTheTerm = 2
	  // sortingMethodCodeForTheTerm = 3
	  // sortingMethodCodeForTheTerm = 4
	  // sortingMethodCodeForTheTerm = 5 Using our current MLed model
	  int sortingMethodCodeForTheTerm = 5;

	  // NOTE(review): LoadUpTerms is commented out below, so `terms` is passed
	  // to CreateHistogram empty — confirm this is intended.
	  vector<string> terms;
	  // Load Up some terms into the variable queryTerms
	  // LoadUpTerms(terms);

	  // updated by Wei 2013/02/13 add more typical terms to do test
	  // string queryTest0 = "snuff";	// 3854 postings.
	  // string queryTest1 = "snyder";	// 47802 postings.
	  // string queryTest2 = "so";	// 3219389 postings.
	  // string queryTest3 = "soalr";	// 10 postings.
	  // string queryTest4 = "soap";	// 175292 postings.

	  // string queryTest5 = "0";	// 8400333 postings.
	  // string queryTest6 = "113";	// 599688 postings.
	  // string queryTest7 = "2";	// 10966214 postings.




	  // 00000000000000000000 0000000000000000000

	  // ideal for debugging, mark1
	  // string queryTerm0 = "00000000000000000000"; //70 postings.
	  // string queryTerm1 = "0"; //8400333 postings.

	  // ideal for debugging, mark2
	  // string queryTerm2 = "0000000000000000000"; //46 postings.
	  //string queryTerm3 = "000000000000000000"; //82 postings.
	  //string queryTerm4 = "0000000000000000"; //251 postings.
	  //string queryTerm5 = "00000000000000000"; //87 postings.

	  // ideal for debugging, mark3, 2 postings
	  // string queryTerm6 = "000sites";

	  // ideal for debugging, mark4
	  // string queryTerm7 = "00wc";
	  // string queryTerm8 = "03255";


	  // queryTerms.push_back(queryTest0);
	  // queryTerms.push_back(queryTest1);
	  // queryTerms.push_back(queryTest2);
	  // queryTerms.push_back(queryTest3);
	  // queryTerms.push_back(queryTest4);

	  // queryTerms.push_back(queryTest5);
	  // queryTerms.push_back(queryTest6);
	  // queryTerms.push_back(queryTest7);


	  // queryTerms.push_back(queryTerm0);
	  // queryTerms.push_back(queryTerm1);
	  // queryTerms.push_back(queryTerm2);
	  // queryTerms.push_back(queryTerm3);
	  // queryTerms.push_back(queryTerm4);
	  // queryTerms.push_back(queryTerm5);
	  // queryTerms.push_back(queryTerm6);
	  // queryTerms.push_back(queryTerm7);
	  // queryTerms.push_back(queryTerm8);

	  // option1: months ago
	  // command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

	  // option2: updated 2013/01/24
	  // OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

	  // NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");

	  // For the machine pangolin:
	  // NOTE(review): the active directory below is machine-specific.
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
	  // For the machine dodo:
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  // OLD 32bit version in order to prune the original index
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

	  // ready to DELETE on 2013/08/05 night
	  // IndexCat index_cat(command_line_args.index_files1);

	  // OLD version, dumpped on 2013/08/04 afternoon by Wei
	  // LoadUpQueryTermsProbabilityDistribution_Advance(queryTermsTrueProbabilityDistributionMap,queryTerms1DProbabilityDistributionMap,queryTerms2DProbabilityDistributionMap,queryTermsGoodTuringProbabilityDistributionMap);

	  // Timestamp for the output prefix; "%Y-%m-%d-%H-%M-%S" is 19 chars, so
	  // buff[20] exactly fits with the terminator.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  // Load the ML feature values and the auxiliary maps used by the two
	  // probability factors before building the histogram.
	  layered_index_generator.LoadUpThreeFeatureValuesForMachineLearnedTraining();

	  // Updated by Wei 2013/08/05 night
	  // for the first probability factor
	  // aux maps for the first probability factor
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();
	  // for the second probablity factor
	  // aux maps for the second probablity factor
	  layered_index_generator.LoadUpAuxFilesForSecondProbabilityFactor();


	  // Updated by Wei 2013/08/04 afternoon.
	  // I personally think that this function CreateHistogram do NOT need to take too long.
	  // Meaning do not need to take too much terms into consideration. Usually, tens of terms will be fine and do not need to take half a day to do this

	  // current version
	  layered_index_generator.CreateHistogram(terms, true, sortingMethodCodeForTheTerm);

	  // OLD version, dumpped on 2013/08/04 afternoon by Wei
	  // layered_index_generator.CreateHistogram(terms, true, sortingMethodCodeForTheTerm,queryTermsTrueProbabilityDistributionMap,queryTerms1DProbabilityDistributionMap,queryTerms2DProbabilityDistributionMap,queryTermsGoodTuringProbabilityDistributionMap);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Building score/value Histogram Ends." << std::endl;
  }
  else if (flagString == "14"){
	    // Sub-mode 14: prune ("cut") the index using a universal importance
	    // score, keeping a fixed fraction of the whole index.
	    cout << "real cut to the original index by universal importance score" << endl;
	    cout << "universal cut begins..." << endl;

		vector<string> queryTerms;
		// Fraction of the whole index to keep (1.0 == keep everything) and the
		// project-defined pruning-method code — meaning of code 2 lives in
		// LayeredIndexGenerator, TODO confirm.
		float percentageToKeepOfTheWholeIndex = 1.0;
		int pruningMethodCodeOfTheWholeIndex = 2;
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // NONE

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // Load the complete query terms for evaluation, for production
		  // LoadUpLexiconTermsWei20130213(queryTerms);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  // string queryTest0 = "snuff";	// 3854 postings.
		  // string queryTest1 = "snyder";	// 47802 postings.
		  // string queryTest2 = "so";	// 3219389 postings.
		  // NOTE(review): only this single debug term is active; the bulk
		  // loaders above are commented out.
		  string queryTest3 = "soalr";	// 10 postings.
		  // string queryTest4 = "soap";	// 175292 postings.

		  // string queryTest5 = "0";	// 8400333 postings.
		  // string queryTest6 = "113";	// 599688 postings.
		  // string queryTest7 = "2";	// 10966214 postings.

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  //string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTest5);
		  // queryTerms.push_back(queryTest6);
		  // queryTerms.push_back(queryTest7);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);



		// option1: months ago
		// command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

		// option2: updated 2013/01/24
		// OLD 32bit version
		// command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

		// NEW 64bit version
		command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
		// for the machine pangolin
		command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
		// for the machine dodo
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// OLD 32bit version in order to prune the original index
		// option1 (32bit lexicon with the original index):
		// command_line_args.index_files1 = ParseIndexName("index");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

		// option2 (32bit lexicon only the query terms):
		// command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-27-21-47-55-100%");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		IndexCat index_cat(command_line_args.index_files1);

		// Timestamp for the output prefix; "%Y-%m-%d-%H-%M-%S" is 19 chars,
		// which exactly fits buff[20] with the terminator.
		time_t now;
		now = time(NULL);
		char buff[20];
		strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));

		const string output_index_prefix = "wei_pruning_development_universal_cutting_" + string(buff) + "-" + make_the_value_into_string_format(percentageToKeepOfTheWholeIndex) + "-" + make_the_value_into_string_format(pruningMethodCodeOfTheWholeIndex);
		// Lots of common between the pruning and layering project.
		// From now, we use the class LayeredIndexGenerator to do pruning as well.
		// Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
		LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
		Timer pruning_time;
		layered_index_generator.CutBasedOnUniversalImportanceScore(queryTerms, false, true, percentageToKeepOfTheWholeIndex, pruningMethodCodeOfTheWholeIndex);
		GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
		cout << "The output_index_prefix: " << output_index_prefix << endl;
	    cout << "universal cut ends." << endl;
  }
  else if (flagString == "15"){
	  // Sub-mode 15: load the query-term probability distribution via the
	  // project helper and print every <term, probability> pair.
	  cout << "Load the query term probability distribution begins..." << endl;
	  map<string,float> termProbabilities;

	  LoadUpQueryTermsProbabilityDistribution(termProbabilities);

	  // Dump each entry as "term probability", one per line.
	  map<string,float>::const_iterator entry = termProbabilities.begin();
	  for (; entry != termProbabilities.end(); ++entry) {
		  cout << entry->first << " " << entry->second << endl;
	  }

	  cout << "Load the query term probability distribution ends." << endl;
  }
  else if (flagString == "16"){
	    // Sub-mode 16: for a set of query terms, sort each term's inverted
	    // list and emit the TCP-pruning threshold for each percentage cut.
	    cout << "Given a set of queryTerms, sort the inverted index of each of them, and output the threshold of corresponding percentage cut for the pruning method TCP." << endl;
	    cout << "procedure begins..." << endl;

	    // for the gov2
	    // this index includes all the terms from the lexicon
	    // string indexName = "index:0.0";
	    // for vidaserver1 NFS:
	    // string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/indexInDevelopment";

	    // for the gov2
	    // this index includes all the terms from the lexicon
	    // NOTE(review): active configuration is the gov2 index on vidaserver1 NFS.
	    string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	    // for vidaserver1 NFS:
	    string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/Gov2_indexes";

	    // for the clueweb09B
	    // this index includes all the terms from the lexicon
	    // string indexName = "LEAVE_wei_uniform_pruning_2014-06-09-19-16-46_None_None";
	    // for vidaserver1 NFS:
	    // string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/";

		command_line_args.index_files1 = ParseIndexName(indexName.c_str());
		command_line_args.index_files1.SetDirectory(indexPath.c_str());

		IndexCat index_cat(command_line_args.index_files1);

		// Timestamp for the output prefix; "%Y-%m-%d-%H-%M-%S" is 19 chars,
		// which exactly fits buff[20] with the terminator.
		time_t now;
		now = time(NULL);
		char buff[20];
		strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));

		const string output_index_prefix = "wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";
		// Lots of common between the pruning and layering project.
		// From now, we use the class LayeredIndexGenerator to do pruning as well.
		// Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
		LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
		Timer pruning_time;

		// by percentage
		// layered_index_generator.CreateCutThresholdOfEachTermBasedOnPercentageForMultipleTerms();
		// by 1,5,10,20,50,100,500,1000,5000,10000
		layered_index_generator.CreateCutThresholdOfEachTermBasedOnPercentageForMultipleTerms2();

		GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
		cout << "The output_index_prefix: " << output_index_prefix << endl;

	    cout << "procedure ends." << endl;
  }
  else if (flagString == "17"){
	  // Sub-mode 17: query-view (QV) construction.
	  // Currently a stub — only banner messages are emitted; no QV is built.
	  std::cout << "build the query view" << std::endl;
	  std::cout << "Building the query view(QV) Procedure Begins..." << std::endl;

	  // Updated by Wei 2013/02/28
	  // maybe this is NOT the good place to build the query view. Can we try to build the query view from the query processor?

	  std::cout << "Building the query view(QV) Procedure Ends." << std::endl;
  }
  else if (flagString == "18"){
	  // Sub-mode 18: demo of the combinations routine — enumerate all
	  // r-combinations of {0, ..., n-1}, eliding entries 10..117 of the
	  // enumeration with an ellipsis line.
	  cout << "run a program using the boost combinations library(made by OUR UNIVERSITY)" << endl;

	  const int r = 2;
	  const int n = 4;
	  std::vector<int> pool(n);

	  // Fill the pool with 0, 1, ..., n-1.
	  for (int idx = 0; idx < n; ++idx) {
	      pool[idx] = idx;
	  }

	  int combinationCount = 0;
	  do {
	      ++combinationCount;
	      if (combinationCount < 10 || combinationCount > 117) {
	          // Print the first r elements as the current combination.
	          std::cout << "[ " << pool[0];
	          for (int pos = 1; pos < r; ++pos) {
	              std::cout << ", " << pool[pos];
	          }
	          std::cout << " ]" << std::endl;
	      } else if (combinationCount == 10) {
	          // Elide the middle of a long enumeration.
	          std::cout << "  . . ." << std::endl;
	      }
	  } while (next_combination(pool.begin(), pool.begin() + r, pool.end()));
	  std::cout << "Found " << combinationCount << " combinations of size " << r << " without repetitions"
	            << " from a set of " << n << " elements." << std::endl;

  }
  else if (flagString == "19"){
	  // Sub-mode 19: decompress the document collection from stdin's file
	  // list and extract per-document info (edges of web pages) for phase-2
	  // pruning. Step (5) below is explicitly not finished.
	  cout << "(1) Given a set of documents(sorted by their trecIDs) (done)"
			  "(2) Decompress the corresponding compress file into main memory (done)"
			  "(3) Extract the corresponding edges of the specified web pages (done)"
			  "(4) Dump this set of edges into disk (done)"
			  "(5) Record the beginning position and the ending position of each document in the compress file(NOT done)" << endl;



	  // step2: decompress the specific file and get the information
	  GetDefaultLogger().Log("Document collection information extraction...", false);

	  // Get collection indexer in order to do indexing on the specific documents.
	  CollectionIndexer& collection_indexer = GetCollectionIndexer();

	  // Input to the indexer is a list of document collection files we want to index in order.
	  // Deal with the input file.
	  // The cin object will automatically connect with the command line arguments. "< fileName"
	  collection_indexer.ProcessDocumentCollections(cin);


	  // Start timing indexing process.
	  Timer index_time;

	  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
	  collection_indexer.ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning();

	  // for debug ONLY
	  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
	  // End timing indexing process.
  }
  else if (flagString == "20"){
	    // Sub-mode 20 (DEPRECATED): build a forward index from the original
	    // gov2 inverted index. The banner below warns against further
	    // development of this path.
	    // Fix: corrected typos in the user-facing messages ("shit" -> "shift",
	    // "BAND NEW" -> "BRAND NEW", "THe" -> "The", "invertion" -> "inversion").
	    cout << "Updated by Wei 2013/03/21" << endl;
	    cout << "Cause Prof think this is just a small part, let's shift to a more fast and flexible way of solving this problem." << endl;
	    cout << "PLEASE DO NOT DEVELOP THIS FUNCTION ANY MORE. TRY TO FIND A BRAND NEW WAY TO DO THIS" << endl;

	    cout << "The inversion of the original gov2 inverted index" << endl;
	    cout << "The input will be the original gov2 inverted index" << endl;
	    cout << "The output will be the forward index, maintaining the same info as the inverted index" << endl;
	    cout << "forward index building begins..." << endl;

		// NOTE(review): only the single debug term "soalr" is active; the bulk
		// term loaders are commented out.
		vector<string> queryTerms;
		  // Notes:
		  // Some statistics about time of generating the pruned index.
		  // NONE

		  // Load the complete query terms for evaluation, for production
		  // LoadUpQueryTermsWei20120711(queryTerms);

		  // Load the complete query terms for evaluation, for production
		  // LoadUpLexiconTermsWei20130213(queryTerms);

		  // updated by Wei 2013/02/13 add more sensitive terms to do test

		  // string queryTest0 = "snuff";	// 3854 postings.
		  // string queryTest1 = "snyder";	// 47802 postings.
		  // string queryTest2 = "so";	// 3219389 postings.
		  string queryTest3 = "soalr";	// 10 postings.
		  // string queryTest4 = "soap";	// 175292 postings.

		  // string queryTest5 = "0";	// 8400333 postings.
		  // string queryTest6 = "113";	// 599688 postings.
		  // string queryTest7 = "2";	// 10966214 postings.

		  // 00000000000000000000 0000000000000000000

		  // ideal for debugging, mark1
		  // string queryTerm0 = "00000000000000000000"; //70 postings.
		  // string queryTerm1 = "0"; //8400333 postings.

		  // ideal for debugging, mark2
		  //string queryTerm2 = "0000000000000000000"; //46 postings.
		  //string queryTerm3 = "000000000000000000"; //82 postings.
		  //string queryTerm4 = "0000000000000000"; //251 postings.
		  //string queryTerm5 = "00000000000000000"; //87 postings.

		  // ideal for debugging, mark3, 2 postings
		  // string queryTerm6 = "000sites";

		  // ideal for debugging, mark4
		  // string queryTerm7 = "00wc";
		  // string queryTerm8 = "03255";

		  // queryTerms.push_back(queryTest0);
		  // queryTerms.push_back(queryTest1);
		  // queryTerms.push_back(queryTest2);
		  queryTerms.push_back(queryTest3);
		  // queryTerms.push_back(queryTest4);

		  // queryTerms.push_back(queryTest5);
		  // queryTerms.push_back(queryTest6);
		  // queryTerms.push_back(queryTest7);

		  // queryTerms.push_back(queryTerm0);
		  // queryTerms.push_back(queryTerm1);
		  // queryTerms.push_back(queryTerm2);
		  // queryTerms.push_back(queryTerm3);
		  // queryTerms.push_back(queryTerm4);
		  // queryTerms.push_back(queryTerm5);
		  // queryTerms.push_back(queryTerm6);
		  // queryTerms.push_back(queryTerm7);
		  // queryTerms.push_back(queryTerm8);



		// option1: months ago
		// command_line_args.index_files1.SetDirectory("/data1/team/weijiang/compatibleIndexesWithIRTK/gov2");

		// option2: updated 2013/01/24
		// OLD 32bit version
		// command_line_args.index_files1 = ParseIndexName("wei_pruning_development_2013-01-11-18-23-22-100%");

		// NEW 64bit version
		command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
		// for the machine pangolin
		command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
		// for the machine dodo
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		// OLD 32bit version in order to prune the original index
		// option1 (32bit lexicon with the original index):
		// command_line_args.index_files1 = ParseIndexName("index");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/originalGov2Index/");

		// option2 (32bit lexicon only the query terms):
		// command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-27-21-47-55-100%");
		// command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

		IndexCat index_cat(command_line_args.index_files1);

		// Timestamp for the output prefix; "%Y-%m-%d-%H-%M-%S" is 19 chars,
		// which exactly fits buff[20] with the terminator.
		time_t now;
		now = time(NULL);
		char buff[20];
		strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));

		const string output_index_prefix = "wei_pruning_development_forward_index_" + string(buff);
		// Lots of common between the pruning and layering project.
		// From now, we use the class LayeredIndexGenerator to do pruning as well.
		// Updated by Wei 2013/01/27. I still think it is a good idea extend the function of the LayeredIndexGenerator
		LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
		Timer pruning_time;
		// The following are the arguments for the BuildForwardIndex(...) function
		// argument1: vector<string> & queryTerms,
		// argument2: bool debugFlag,
		// argument3: bool store_computed_score_into_external_index_flag
		layered_index_generator.BuildForwardIndex(queryTerms, false, true);
		GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
		cout << "The output_index_prefix: " << output_index_prefix << endl;
		cout << "forward index building ends." << endl;
  }
  else if (flagString == "21"){
	      // Sub-mode 21: parse the document collection (file list on stdin)
	      // and record each posting's rank within its document, then report
	      // collection statistics.
	      cout << "Getting the rank in the doc feature" << endl;
		  cout << "(1) Given a set of documents(sorted by their trecIDs) (done)"
				  "(2) Decompress the corresponding compress file into main memory (done)"
				  "(3) Record and sign the posting rank in the doc (need to be updated by Wei 2013/07/14)" << endl;

		  // step2: decompress the specific file and get the information
		  GetDefaultLogger().Log("Document collection information extraction...", false);

		  // Get collection indexer in order to do indexing on the specific documents.
		  CollectionIndexer& collection_indexer = GetCollectionIndexer();

		  // Input to the indexer is a list of document collection files we want to index in order.
		  // Deal with the input file.
		  // The cin object will automatically connect with the command line arguments. "< fileName"
		  collection_indexer.ProcessDocumentCollections(cin);


		  // Start timing indexing process.
		  Timer index_time;

		  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
		  collection_indexer.ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning();

		  // for debug ONLY
		  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
		  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
		  // End timing indexing process.


		  // Persist the per-collection docID ranges (filename declared elsewhere in this file).
		  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

		  uint64_t posting_count = GetPostingCollectionController().posting_count();

		  cout << "Collection Statistics:\n";
		  cout << "total posting count: " << posting_count << "\n";
		  cout << "total number of documents indexed: " << collection_indexer.doc_id() << endl;

  }
  else if (flagString == "22"){
	      // Sub-mode 22: generate the Xdoc value for each document in the
	      // collection. NOTE(review): apart from the banner, this body is
	      // identical to sub-mode 21 — candidate for a shared helper.
	      cout << "This is the task for generating the Xdoc value for each document in the collection(like in gov2 dataset)" << endl;

		  // step2: decompress the specific file and get the information
		  GetDefaultLogger().Log("Document collection information extraction...", false);

		  // Get collection indexer in order to do indexing on the specific documents.
		  CollectionIndexer& collection_indexer = GetCollectionIndexer();

		  // Input to the indexer is a list of document collection files we want to index in order.
		  // Deal with the input file.
		  // The cin object will automatically connect with the command line arguments. "< fileName"
		  collection_indexer.ProcessDocumentCollections(cin);


		  // Start timing indexing process.
		  Timer index_time;

		  //All the things have been done in this function,including the parsing stage, the posting collection stage and the index builder stage.
		  collection_indexer.ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning();

		  // for debug ONLY
		  // cout << "total number of documents indexed(mark1): " << collection_indexer.doc_id() << endl;
		  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
		  // End timing indexing process.


		  // Persist the per-collection docID ranges (filename declared elsewhere in this file).
		  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

		  uint64_t posting_count = GetPostingCollectionController().posting_count();

		  cout << "Collection Statistics:\n";
		  cout << "total posting count: " << posting_count << "\n";
		  cout << "total number of documents indexed: " << collection_indexer.doc_id() << endl;

  }
  else if (flagString == "23"){
	  // Sub-mode 23 (utility): dump <docID, trecID, URL, document size in
	  // words> for the configured index.
	  cout << "value 23: [Utility]This is task for outputting the <docID, trecID, URL and size of the document in words> for gov2/clueweb09B dataset(Updated by Wei 2015/02/26)" << endl;
	  // previous used indexes, months ago
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_uniform_pruning_20131105AfternoonBABY");
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/web-search-engine-wei/polyIRIndexer/protectedSetOfIndexes");
	  // clueweb09B index
	  // command_line_args.index_files1 = ParseIndexName("index_clueweb09B");
	  // command_line_args.index_files1.SetDirectory("/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes");
	  // gov2 index(UPP-5_1%) in development
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None");
	  // command_line_args.index_files1.SetDirectory("/home/vgc/wei/workspace/NYU_IRTK/data/GOV2/Gov2_indexes/");
	  // tier1_OR_1%
	  // command_line_args.index_files1 = ParseIndexName("index:0.0");
	  // command_line_args.index_files1.SetDirectory("/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/1%_docHitsIndex/subIndex0");
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // command_line_args.index_files1.SetDirectory("/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/");
	  // command_line_args.index_files1 = ParseIndexName("index");
	  // command_line_args.index_files1.SetDirectory("/san_share/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/thatSimpleHybridApproach_20141212/0M_1M_statistics_30%_h_10/5%/all");

	  // Active configuration: the remapped index in this workspace.
	  command_line_args.index_files1 = ParseIndexName("index_remapped");
	  command_line_args.index_files1.SetDirectory("/home/weijiang/workspace/NYU_IRTK/polyIRToolkit_Wei");

	  // Timestamp for the (unused here) output prefix; "%Y-%m-%d-%H-%M-%S" is
	  // 19 chars, which exactly fits buff[20] with the terminator.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
	  layered_index_generator.OutputDocIDANDTrecIDANDDocSizeInWords();
  }
  else if (flagString == "24"){
	  // Sub-mode 24: load a pre-selected random sample of postings and
	  // produce their probability scores.
	  cout << "Producing the set of probability for the randomly selected postings begins..." << std::endl;

	  // 32bit gov2 index
	  // string indexName = "index";
	  // string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index";

	  // 64bit clueweb09B index
	  // local scratch
	  // string indexName = "LEAVE_wei_uniform_pruning_2014-06-09-19-16-46_None_None";
	  // string indexPath = "/local_scratch/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes";
	  // on NFS
	  // NOTE(review): active configuration is the 64bit clueweb09B index on NFS.
	  string indexName = "LEAVE_wei_uniform_pruning_2014-06-09-19-16-46_None_None";
	  string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/";
	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  // NOTE(review): indexPath is passed as a std::string here, while other
	  // branches pass .c_str() — presumably SetDirectory has both overloads
	  // (or an implicit conversion); confirm.
	  command_line_args.index_files1.SetDirectory(indexPath);

	  // Timestamp for the (unused here) output prefix; "%Y-%m-%d-%H-%M-%S" is
	  // 19 chars, which exactly fits buff[20] with the terminator.
	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(buff) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;

	  // Updated by Wei 2014/06/15 at school
	  layered_index_generator.LoadUpRandomlySelectedPostings();

	  // Updated by Wei 2014/06/15 at school
	  layered_index_generator.ProduceScoresForRandomlySelectedPostings();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Producing the set of probability for the randomly selected postings ends." << std::endl;
  }
  else if (flagString == "25"){
	  // Task 25: dump <docID, # of postings> pairs for the gov2/clueweb09B dataset.
	  cout << "value 25: This is task for outputting the <docID, # of postings> for gov2/clueweb09B dataset (Updated by Wei 2014/06/07)" << std::endl;

	  // Active index: 32bit clueweb09B on vidaserver1.
	  // Alternatives (disabled): 64bit gov2 pruned index
	  //   ("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%" under
	  //    /data3/obukai/.../prunedGov2IndexBasedOn_PartialBM25)
	  // or 32bit gov2 "index" under /data3/obukai/.../originalGov2Index.
	  command_line_args.index_files1 = ParseIndexName("index_clueweb09B");
	  command_line_args.index_files1.SetDirectory("/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes");

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(timestamp) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;
	  layered_index_generator.OutputingDocIDANDNumOfPostingsStoredInIndex();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "26"){
	  // Task 26: sanity-check a newly generated 64bit index (vidaserver1 local scratch).
	  cout << "Check the newly generated 64BIT index" << endl;
	  cout << "Procedure begins..." << endl;

	  command_line_args.index_files1 = ParseIndexName("NO_USE_wei_uniform_pruning_2014-06-09-14-30-39_None_None");
	  command_line_args.index_files1.SetDirectory("/local_scratch/wei/workspace/NYU_IRTK/data/outputDirForIndexes");

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(timestamp) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;
	  layered_index_generator.CheckNewlyGenerated64BitIndex();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()) + " seconds.", false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Procedure ends." << endl;
  }
  else if (flagString == "27"){
	  // Task 27: compute each posting's rank within its list and store the ranks
	  // in the external (*.ext) inverted index.
	  cout << "compute the posting rank in list and store the rank in the external inverted index" << endl;
	  cout << "Storing Procedure Begins..." << endl;

	  // Debug switch: a non-empty queryTermsDictForDebugging restricts the run to
	  // the listed terms (debug mode); an empty map (size == 0) means production mode.
	  map<string,int> queryTermsDictForDebugging;
	  // Other clueweb09B candidates (disabled): "0" (12888029 postings),
	  // "00" (6673682 postings), "a" (44570315 postings).
	  string testTerm3 = "0000000"; // 2462 postings in clueweb09B
	  queryTermsDictForDebugging[testTerm3] = 1;

	  // option1: 32bit index, on vidaserver1.
	  // option2 (disabled): 64bit index
	  //   "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%" under the
	  //   Pangolin/DODO prunedGov2IndexBasedOn_PartialBM25 directories.
	  command_line_args.index_files1 = ParseIndexName("index_clueweb09B");
	  command_line_args.index_files1.SetDirectory("/local_scratch/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes");

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t now = time(NULL);
	  char buff[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(buff, sizeof(buff), "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;
	  // StorePostingRankInListToExternalIndex(queryTerms, storeExternalScores):
	  //  - the 1st argument carries the query terms (debug mode when non-empty);
	  //  - the external index is ALWAYS generated;
	  //  - storeExternalScores == false: store only (1) chunk max score and (2) block max score;
	  //  - storeExternalScores == true: ALSO store the per-posting aux info used for pruning.
	  // TODO: ??? the output meta index, some numbers are not that consistent with the original one.
	  const bool storeExternalScores = false;
	  // BUG FIX: the old message hard-coded "true" while the call passed false;
	  // print the flag that is actually used.
	  cout << "store the external score into the index set to be " << (storeExternalScores ? "true" : "false") << endl;
	  layered_index_generator.StorePostingRankInListToExternalIndex(queryTermsDictForDebugging, storeExternalScores);
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Storing Procedure Ends." << endl;
  }
  else if (flagString == "28"){
	  // Task 28: build a forward index that includes the posting_rank_in_list value.
	  cout << "Build a forward index including the value: posting_rank_in_list" << endl;
	  // The 9 terms standing in for document1 are:
	  // {all, feedback, consolidated, sources, planning, including, questions, find, looking}

	  // Index currently under test; "LEAVE_wei_uniform_pruning_2013-09-10-20-32-07_None_None"
	  // has already passed this test.
	  const string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-13-38-39_None_None";
	  const string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  // LoadUpQueryTermsProbabilityDistribution(queryTermsProbabilityDistributionMap) is
	  // only needed by some computation methods, so it stays disabled here.

	  // NOTE(review): query_processor is destroyed as soon as this branch ends;
	  // presumably its constructor performs the actual work (interactive mode) — confirm.
	  LocalQueryProcessor query_processor(command_line_args.index_files1, command_line_args.query_stop_words_list_file, command_line_args.query_algorithm_local,
	                                      command_line_args.query_mode_local, command_line_args.result_format_local);
  }
  else if (flagString == "29"){
	  // Task 29: prototype of the posting-oriented uniform pruning method.
	  cout << "Prototyping the posting oriented uniform pruning method." << endl;
	  cout << "This section of logic is no longer used since 2014/03/28 afternoon by Wei at school";
	  exit(1);

	  // ---------------------------------------------------------------------
	  // Everything below is unreachable (exit(1) above); kept for reference.
	  // ---------------------------------------------------------------------

	  // Step1: pick the index used for loading the postings.
	  // Option1 (debugging): query-terms-only index, 38449 terms — a little smaller
	  // than the actual 38871 query terms in the 100K queries.
	  string indexName = "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%";
	  // Option2 (production, disabled): index covering the whole lexicon:
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(timestamp) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  // Required preparation, in this order:
	  // aux inputs for the first probability factor P(t)
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();
	  // probability table keyed by (list length, relative rank)
	  layered_index_generator.LoadUpProbabilityTableBasedOnListLengthANDRelativeRank();
	  // <docID, # of postings> pairs — sizes the allocations (~60s; skip only when debugging)
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();
	  // <termID, term> pairs needed for pruning
	  layered_index_generator.LoadUpTermIDANDTermPairs();
	  // must run AFTER all of the loads above
	  layered_index_generator.buildTermIDWithTheirFirstFactorProbabilityMap();

	  Timer processing_time;
	  // Optimized version: maps are passed by reference (the original step4 slowness
	  // came from passing maps by value). The un-optimized
	  // PrototypingOfThePostingOrientedUniformPruningMethod(false) is retired.
	  layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethodOptimizedVersion(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "30"){
	  // Task 30: report how much memory the longest posting list will require.
	  cout << "This example shows how much memory will be used for the longest list." << endl;

	  // Step1: pick the index used for loading the postings.
	  // Option1 (debugging): query-terms-only index, 38449 terms — a little smaller
	  // than the actual 38871 query terms in the 100K queries.
	  string indexName = "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%";
	  // Option2 (production, disabled): index covering the whole lexicon:
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  string indexPath = "/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(timestamp) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;
	  // PrototypingOfThePostingOrientedUniformPruningMethod(false) is kept only as
	  // a reference and is not invoked here.
	  layered_index_generator.ComputeHowMuchMemoryWillBeUsed();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "31"){
	  // Task 31: given a set of postings, emit the set of documents that must be parsed.
	  cout << "NOT yet filled begins..." << std::endl;

	  // Active index: 32bit lexicon covering ALL terms (preferred) — the index_reader
	  // must be switched to 32bit accordingly.
	  // Alternative (disabled): 64bit lexicon restricted to query terms:
	  //   "LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%" under
	  //   /data3/obukai/.../prunedGov2IndexBasedOn_PartialBM25
	  command_line_args.index_files1 = ParseIndexName("index");
	  command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_pruning_development_universal_cutting_" + string(timestamp) + "-" + "None" + "-" + "None";

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  Timer duration_time;
	  // Load the sampled postings, then derive the documents they require.
	  layered_index_generator.LoadUpRandomlySelectedPostings();
	  layered_index_generator.OutputASetOfDocumentsNeededToBeParsedGivenASetOfPostingsAsInput();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(duration_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "NOT yet filled ends." << std::endl;
  }
  else if (flagString == "32"){
	  // Task 32: document-picking analysis (retired variant).
	  cout << "Documents picking analysis. Updated by Wei on 2013/11/07 night at school." << endl;
	  cout << "This section of logic is no longer used since 2014/03/28 afternoon by Wei at school";
	  exit(1);

	  // ---------------------------------------------------------------------
	  // Everything below is unreachable (exit(1) above); kept for reference.
	  // ---------------------------------------------------------------------

	  // Step1: the index used for loading the postings (set of indexes from dodo).
	  string indexName = "index";
	  string indexPath = "/home/diaosi/web-search-engine-wei/polyIRIndexer/";
	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(timestamp) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  // Required preparation, in this order:
	  // aux inputs for the first probability factor P(t)
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();
	  // probability table keyed by (list length, relative rank)
	  layered_index_generator.LoadUpProbabilityTableBasedOnListLengthANDRelativeRank();
	  // <docID, # of postings> pairs — sizes the allocations (~60s; a PRODUCTION /
	  // DEBUGGING switch was planned for this step)
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();
	  // <termID, term> pairs needed for pruning
	  layered_index_generator.LoadUpTermIDANDTermPairs();
	  // must run AFTER all of the loads above
	  layered_index_generator.buildTermIDWithTheirFirstFactorProbabilityMap();

	  Timer processing_time;
	  // Optimized version: maps are passed by reference (the original step4 slowness
	  // came from passing maps by value). The un-optimized
	  // PrototypingOfThePostingOrientedUniformPruningMethod(false) is retired.
	  layered_index_generator.PrototypingOfThePostingOrientedUniformPruningMethodOptimizedVersion(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "33"){
	  // Task 33: document-picking analysis of document prefix behaviour.
	  cout << "Documents picking analysis. Updated by Wei on 2013/11/06 morning at school." << endl;

	  // Index hosted on dodo.
	  const string indexName = "LEAVE_wei_uniform_pruning_20131105AfternoonBABY";
	  const string indexPath = "/home/diaosi/web-search-engine-wei/polyIRIndexer/";
	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(timestamp) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  // Required preparation, in this order:
	  // aux inputs for the first probability factor P(t)
	  layered_index_generator.LoadUpAuxFilesForFirstProbabilityFactor();
	  // probability table keyed by (list length, relative rank)
	  layered_index_generator.LoadUpProbabilityTableBasedOnListLengthANDRelativeRank();
	  // <docID, # of postings> pairs — sizes the allocations (~60s; a PRODUCTION /
	  // DEBUGGING switch was planned for this step)
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();
	  // <termID, term> pairs needed for pruning
	  layered_index_generator.LoadUpTermIDANDTermPairs();
	  // must run AFTER all of the loads above
	  layered_index_generator.buildTermIDWithTheirFirstFactorProbabilityMap();

	  Timer processing_time;
	  // Optimized version: maps are passed by reference (the original step4 slowness
	  // came from passing maps by value). The un-optimized
	  // PrototypingOfThePostingOrientedUniformPruningMethod(false) is retired.
	  layered_index_generator.DocumentAnalyzingAboutTheirPrefixBehaviour_OLD_VERSION(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if (flagString == "34"){
	  // Task 34: offline document analysis using the 2D probability table
	  // (the 3D-table variant is described at the end of this branch).
	  // This routine works for both the gov2 and clueweb09B datasets, and on the gray cluster.
	  cout << "Offline Document Analysis Stage begins..." << endl;

	  // Active dataset: gov2, index covering all terms in the lexicon, on vidaserver1 NFS.
	  const string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // Disabled gov2 paths:
	  //   pangolin: /data/obukai/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/
	  //   dodo:     /home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25
	  //   moa:      /home/diaosi/workspace/web-search-engine-wei-2014-April/data/Gov2_indexes/prunedGov2IndexBasedOn_PartialBM25/
	  //   vidaserver1 local: /local_scratch/wei/workspace/NYU_IRTK/data/Gov2_indexes/
	  // Disabled clueweb09B setup:
	  //   indexName "LEAVE_wei_uniform_pruning_2014-06-09-19-16-46_None_None" with either
	  //   /local_scratch/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/ or
	  //   /home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/
	  const string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/GOV2/Gov2_indexes";

	  command_line_args.index_files1 = ParseIndexName(indexName.c_str());
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  // Timestamped output prefix so repeated runs never collide on disk.
	  time_t current_time = time(NULL);
	  char timestamp[20]; // "YYYY-mm-dd-HH-MM-SS" + NUL == 20 bytes exactly
	  strftime(timestamp, sizeof(timestamp), "%Y-%m-%d-%H-%M-%S", localtime(&current_time));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(timestamp) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator(command_line_args.index_files1, output_index_prefix);

	  // OLD gov2-only preparation (superseded):
	  //   LoadUpAuxFilesForFirstProbabilityFactor();  // P(t) computation
	  //   LoadUpTermIDANDTermPairs();                 // <termID, term> pairs for pruning
	  //   buildTermIDWithTheirFirstFactorProbabilityMap();

	  // Current preparation (clueweb09B/gov2):
	  layered_index_generator.LoadUpEssentialityForPosting();
	  layered_index_generator.LoadFirstFactorProbability();
	  // <docID, # of postings recorded> into main memory
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();
	  layered_index_generator.LoadUp2DTableProbabilities();

	  Timer processing_time;

	  // Current relrank-based approach; maps are passed by reference.
	  // option1 (disabled, slower): OfflineDocumentAnalysis_onlineRankComputing(false);
	  // option2 (active, much faster): offline rank retrieval via the 2D table.
	  layered_index_generator.OfflineDocumentAnalysis_offlineRankRetrieving_using2DTable(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Pre Document Analysis Stage ends." << endl;

	  // -----------------------------------------------------------------------
	  // Retired 3D-probability-table testing routine (updated 2014/10/26):
	  // identical setup to the above, except the preparation additionally calls
	  //   LoadUpDocHitInfoForThirdDimension();   // docHit info for the 3rd dimension
	  // replaces LoadUp2DTableProbabilities() with
	  //   LoadUp3DTableProbabilities();
	  // and runs
	  //   OfflineDocumentAnalysis_offlineRankRetrieving_using3DTable(false);
	  // instead of the 2D variant.
	  // -----------------------------------------------------------------------
  }
  else if (flagString == "35"){
	  cout << "do_simple_popping begins..." << endl;
	  // this index do NOT have any use in the current function
	  // for gov2 index
	  string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // for pangolin:
	  // string indexPath = "/data/obukai/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  // for dodo:
	  // string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25/";
      // for moa:
	  // string indexPath = "/home/diaosi/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25";
	  // for vidaserver1:
	  string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/GOV2/Gov2_indexes";

	  // for clueweb09B index
	  // N/A

	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  // Need to load up the <docID,# of postings recorded> in order to allocate the memory
	  // 60Secs to load all things into main memory
	  layered_index_generator.LoadUpDocIDANDNumOfPostingPairs();

	  // variables to change:
	  // (1) change the aux file name (gov2/clueweb09B)
	  // (2) change the value of the lower and upper bound
	  // (3) change the input Document Posting Array Values file name
	  // (4) change the output File Name Posting Popped
	  // (5) change the dynamic weight
	  string inputAuxFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kAuxFileForNavigatingBinaryPostingFile));
	  const uint32_t BOTH_COLLECTION_LOWER_BOUND = Configuration::GetResultValue<long int>(Configuration::GetConfiguration().GetNumericalValue(config_properties::kBothCollectionLowerBound));
	  const uint32_t BOTH_COLLECTION_UPPER_BOUND = Configuration::GetResultValue<long int>(Configuration::GetConfiguration().GetNumericalValue(config_properties::kBothCollectionUpperBound));
	  string inputDocumentPostingArrayValuesBinaryFileName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kDocumentPostingValuesOutputFileNameBinary));
	  string outputFileNamePostingPoppedInBinaryFormat = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kALLPostingsBeingPoppedFilePrefixNameBinary));
	  const float dynamicWeight = Configuration::GetResultValue<long int>(Configuration::GetConfiguration().GetNumericalValue(config_properties::kDynamicWeight));
	  // switch for using / NOT using dynamic weight. Updated by Wei on 2014/06/05 at school
	  // true: use dynamic weight and compute the final probability during popping
	  // false: NOT use dynamic weight and directly get the final probability from the document posting array file
	  bool staticMode = false; // This is used for Juan's BPP case ONLY
	  bool dynamicWeightSwitch = false;

	  // Checkings
	  if (inputDocumentPostingArrayValuesBinaryFileName == "N/A"){
			cout << "inputDocumentPostingArrayValuesBinaryFileName: " << inputDocumentPostingArrayValuesBinaryFileName << endl;
			exit(1);
	  }

	  if (outputFileNamePostingPoppedInBinaryFormat == "N/A"){
			cout << "outputFileNameBinary: " << outputFileNamePostingPoppedInBinaryFormat << endl;
			exit(1);
	  }

	  if (BOTH_COLLECTION_LOWER_BOUND == BOTH_COLLECTION_UPPER_BOUND){
			cout << "BOTH_COLLECTION_LOWER_BOUND: " << BOTH_COLLECTION_LOWER_BOUND << endl;
			cout << "BOTH_COLLECTION_UPPER_BOUND: " << BOTH_COLLECTION_UPPER_BOUND << endl;
			exit(1);
	  }

	  cout << "BOTH_COLLECTION_LOWER_BOUND: " << BOTH_COLLECTION_LOWER_BOUND << endl;
	  cout << "BOTH_COLLECTION_UPPER_BOUND: " << BOTH_COLLECTION_UPPER_BOUND << endl;
	  cout << "inputAuxFileName: " << inputAuxFileName << endl;
	  cout << "inputDocumentPostingArrayValuesBinaryFileName: " << inputDocumentPostingArrayValuesBinaryFileName << endl;
	  cout << "outputFileNamePostingPoppedInBinaryFormat: " << outputFileNamePostingPoppedInBinaryFormat << endl;
	  cout << "dynamicWeight: " << dynamicWeight << endl;
	  cout << "dynamicWeightSwitch: " << dynamicWeightSwitch << endl;
	  cout << "staticMode: " << staticMode << endl;

	  Timer processing_time;
	  layered_index_generator.load_aux_file_for_posting_info_file_navigation(inputAuxFileName);
	  layered_index_generator.do_simple_popping(BOTH_COLLECTION_LOWER_BOUND,BOTH_COLLECTION_UPPER_BOUND,inputDocumentPostingArrayValuesBinaryFileName,outputFileNamePostingPoppedInBinaryFormat,dynamicWeight,dynamicWeightSwitch,staticMode);
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);

	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "do_simple_popping Ends." << endl;
  }
  else if(flagString == "36"){
	  cout << "quantization is NOT USED in our pruning project since 2013/04/10afternoon" << endl;
	  exit(1);
	  cout << "Simple example to show that the quantization code from prof actually works begins..." << endl;
	  // Step1: the index will be used for loading the postings
	  // Option1: The below index contains ONLY the query terms, the # of terms are: 38449 (a little smaller than the actual # of query terms (38871) in 100K queries)
	  // (Used for debugging)
	  string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // Option2: The below index contains ALL terms in the lexicon
	  // (Used for production)
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // for pangolin:
	  // string indexPath = "/data/obukai/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  // for dodo:
	  // string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25/";
      // for moa:
	  string indexPath = "/home/diaosi/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path:" + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
	  Timer processing_time;
	  layered_index_generator.quantization();
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Simple example to show that the quantization code from prof actually works ends." << endl;
  }
  else if(flagString == "37"){
	  cout << "compute the posting rank in the list and output them in binary format" << endl;
	  cout << "Procedure Begins..." << endl;

	  // option1: OLD 32bit version
	  command_line_args.index_files1 = ParseIndexName("index_clueweb09B");
	  // Path for the server Pangolin
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");
	  // Path for the server DODO
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/workspace/web-search-engine-wei-qi-2014/poly-ir-toolkit");
	  // Path for the server vidaserver1
	  command_line_args.index_files1.SetDirectory("/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes");

	  // option2: NEW 64bit version
	  // command_line_args.index_files1 = ParseIndexName("LEAVE_wei_pruning_development_testing_2013-01-31-21-40-18-100%");
	  // Path for the server Pangolin
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
	  // Path for the server DODO
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";

	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;

	  layered_index_generator.ComputePostingRankInListForClueweb09B();
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Procedure Ends." << endl;
  }
  else if(flagString == "38"){
	  cout << "output the impact score for the gov2 dataset" << endl;
	  cout << "Procedure Begins..." << endl;

	  // option1: OLD 32bit version
	  // command_line_args.index_files1 = ParseIndexName("test_clueweb");
	  // Path for the server Pangolin
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/originalGov2Index");
	  // Path for the server DODO
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/workspace/web-search-engine-wei-qi-2014/poly-ir-toolkit");

	  // option2: NEW 64bit version
	  command_line_args.index_files1 = ParseIndexName("LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None");
	  // Path for the server Pangolin
	  // command_line_args.index_files1.SetDirectory("/data3/obukai/the_new_trip_of_feature_generation/gov2ClearYourMindAndDoItAgain/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25");
	  // Path for the server DODO
	  // command_line_args.index_files1.SetDirectory("/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25");
	  // Path for the server moa:
	  command_line_args.index_files1.SetDirectory("/home/diaosi/workspace/web-search-engine-wei-2014-April/data/Gov2_indexes/prunedGov2IndexBasedOn_PartialBM25");

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";

	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;
	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer pruning_time;

	  layered_index_generator.OutputImpactScoreForGov2();
	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(pruning_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Procedure Ends." << endl;
  }
  else if(flagString == "39"){
	  cout << "output the simple document posting array for the clueweb09B dataset" << endl;
	  // The format is simple, docID, # of postings followed by a list of posting list

	  // 64bit gov2
	  // this index includes all the terms from the lexicon
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // for pangolin:
	  // string indexPath = "/data/obukai/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  // for dodo:
	  // string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25";
      // for moa:
	  // string indexPath = "/home/diaosi/workspace/web-search-engine-wei-2014-April/data/Gov2_indexes/prunedGov2IndexBasedOn_PartialBM25/";

	  // 32bit clueweb09B
	  string indexName = "index_clueweb09B";
	  string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes";
	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
      command_line_args.index_files1.SetDirectory(indexPath);
      GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;

	  layered_index_generator.OutputSimpleDocumentPostingArray();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if(flagString == "40"){
	  cout << "Get rank from external index(In debug)..." << endl;
	  // Updated by Wei on 2014/06/17 at school
	  // The following routine are good both for gov2 and clueweb09B dataset
	  // And suitable for the gray cluster as well

	  // for the gov2
	  // this index includes all the terms from the lexicon
	  // string indexName = "LEAVE_wei_uniform_pruning_2013-09-12-16-12-30_None_None";
	  // for pangolin:
	  // string indexPath = "/data/obukai/gov2ClearYourMindAndDoItAgain2014/outputDirForIndexes/prunedGov2IndexBasedOn_PartialBM25/";
	  // for dodo:
	  // string indexPath = "/home/diaosi/outputDirForIndexes/prunedGov2Index/prunedGov2IndexBasedOn_PartialBM25";
	  // for moa:
	  // string indexPath = "/home/diaosi/workspace/web-search-engine-wei-2014-April/data/Gov2_indexes/prunedGov2IndexBasedOn_PartialBM25/";
	  // for vidaserver1:
	  // string indexPath = "/local_scratch/wei/workspace/NYU_IRTK/data/Gov2_indexes/";

	  // for the clueweb09B
	  // string indexName = "LEAVE_wei_uniform_pruning_2014-06-09-19-16-46_None_None";
	  // for vidaserver1 local:
	  // string indexPath = "/local_scratch/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/";
	  // for vidaserver1 NFS:
	  string indexName = "LEAVE_wei_uniform_pruning_2014-06-09-19-16-46_None_None";
	  string indexPath = "/home/vgc/wei/workspace/NYU_IRTK/data/Clueweb09B_indexes/";

	  // string indexName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kCurrentlyUsingPrefixIndexName));
	  // string indexPath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kCurrentlyUsingIndexPath));

	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;

	  // optimizations
	  // Note:
	  // Pass the map by reference, using the & at declaration time.
	  // CURRENT version using the relrank approach, updated by Wei on 2014/06/14
	  layered_index_generator.GetRankFromExternalIndex(false);

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
	  cout << "Pre Document Analysis Stage ends." << endl;
  }
  else if(flagString == "41"){
	  cout << "20140906" << endl;
	  cout << "STOP: Please see the Convert() function in the polyIRToolkit_Wei.cc file" << endl;
	  exit(1);
  }
  else if(flagString == "42"){
	  // for vidaserver1 NFS:
	  string indexName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTempIndexName));
	  string indexPath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTempIndexPath));

	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;
	  layered_index_generator.RecordTermUpperBoundsOfTermsForWANDANDMaxScore();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if(flagString == "43"){
	  // for local:
	  string indexName = "index";
	  string indexPath = "/home/weijiang/workspace/ORIGINAL_POLY_IRTK";

	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;
	  layered_index_generator.Make64BitLexicon();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;
  }
  else if(flagString == "44"){
	  cout << "Computation for the 3D denominator table" << endl;
	  // Because it actually needs the docID to do the one by one mapping for the denominator, so this will become a little complicated than the previous version of the computation of the denominator table
	  // for vidaserver1 NFS:
	  string indexName = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTempIndexName));
	  string indexPath = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kTempIndexPath));

	  command_line_args.index_files1 = ParseIndexName( indexName.c_str() );
	  command_line_args.index_files1.SetDirectory(indexPath);
	  GetDefaultLogger().Log("Look for Indexes in the following path: " + indexPath, false);

	  time_t now;
	  now = time(NULL);
	  char buff[20];
	  strftime(buff, 20, "%Y-%m-%d-%H-%M-%S", localtime(&now));
	  const string output_index_prefix = "NO_USE_wei_uniform_pruning_" + string(buff) + "_" + "None" + "_" + "None";
	  cout << "output_index_prefix(ONLY the *.ext index will be useful):" << output_index_prefix << endl;

	  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);

	  Timer processing_time;

	  layered_index_generator.Compute3DDenominatorTable();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(processing_time.GetElapsedTime()), false);
	  cout << "The output_index_prefix: " << output_index_prefix << endl;

  }
  else{
	  cout << "System do NOT recognize the flag value." << endl;
	  exit(1);
  }
}



void Diff() {
  IndexDiff index_diff(command_line_args.index_files1, command_line_args.index_files2);
  index_diff.Diff(command_line_args.term, command_line_args.term_len);
}




void Index() {
  GetDefaultLogger().Log("Indexing document collection...", false);

  // Get collection indexer in order to do indexing on the specific documents.
  CollectionIndexer& collection_indexer = GetCollectionIndexer();

  // Input to the indexer is a list of document collection files we want to index in order.
  // Deal with the input file.
  collection_indexer.ProcessDocumentCollections(cin);

  // Start timing indexing process.
  Timer index_time;

  collection_indexer.ParseDocumentCollections();

  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);

  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

  uint64_t posting_count = GetPostingCollectionController().posting_count();

  cout << "Collection Statistics:\n";
  cout << "total posting count: " << posting_count << "\n";
  cout << "total number of documents indexed: " << collection_indexer.doc_id() << endl;
}

// Parses an index name of the form "prefix" or "prefix:G.F", where G and F
// are the numeric group and file numbers. A plain prefix yields
// IndexFiles(prefix); a malformed "prefix:..." spec logs a fatal error and
// falls through to a default-constructed IndexFiles.
IndexFiles ParseIndexName(const char* index_name) {
  assert(index_name != NULL);

  const char* colon = strchr(index_name, ':');
  if (colon == NULL) {
    // No group/file qualifier present; the whole argument is the prefix.
    return IndexFiles(index_name);
  }

  const char* dot = strchr(colon + 1, '.');
  // Require at least one character between ':' and '.', and at least one
  // character after the '.'.
  if (dot == NULL || (dot - colon) <= 1 || strlen(dot + 1) == 0) {
    GetErrorLogger().Log("Invalid index name specified on command line.", true);
    return IndexFiles();
  }

  const int group_num = atoi(colon + 1);
  const int file_num = atoi(dot + 1);
  return IndexFiles(string(index_name, (colon - index_name)), group_num, file_num);
}

// Parses a "key=value" string and installs it into the global configuration,
// echoing whether the key overrode an existing entry or added a new one.
// Strings that contain no '=' are silently ignored.
void SetConfigurationOption(string key_value) {
  const size_t eq_pos = key_value.find('=');
  if (eq_pos == string::npos)
    return;

  const string key = key_value.substr(0, eq_pos);
  const string value = key_value.substr(eq_pos + 1);
  const bool overrode = Configuration::GetConfiguration().SetKeyValue(key, value);
  cout << key << " = " << value << (overrode ? " (override)" : " (add)") << endl;
}

// Reports an unrecognized value for a command line option and terminates.
// Fix: exit with a failure status instead of exit(0) — this is an error
// path, and every other error path in this file uses exit(1); exiting 0
// incorrectly told the shell (and any calling script) that the run succeeded.
void UnrecognizedOptionValue(const char* option_name, const char* option_value) {
  cout << "Option '" << string(option_name) << "' has an unrecognized value of '" << string(option_value) << "'" << endl;
  exit(1);
}

// SIGINT handler installed while the program runs in indexing mode (see
// InstallSignalHandler()). The graceful-cleanup logic below is deliberately
// disabled — see the FIXME inside about a race with the parser callback —
// so the handler currently just terminates the process.
void SignalHandlerIndex(int sig) {
/*  GetDefaultLogger().Log("Received termination request. Cleaning up now...", false);

  CollectionIndexer& collection_indexer = GetCollectionIndexer();
  collection_indexer.OutputDocumentCollectionDocIdRanges(document_collections_doc_id_ranges_filename);

  PostingCollectionController& posting_collection_controller = GetPostingCollectionController();
  // FIXME: It's possible that the parser callback will call this simultaneously as we're cleaning up.
  //        Set some special variable in class that's feeding the parser to indicate it to finish up.
  posting_collection_controller.Finish();*/

  exit(0);
}



// TODO: Proper cleanup needed, depending on what mode the program is running in. Delete incomplete indices, etc. Be careful about overwriting indices.
// Installs the SIGINT handler appropriate for the mode the program was
// started in: indexing mode gets SignalHandlerIndex; every other mode keeps
// the default disposition (SIG_DFL).
void InstallSignalHandler() {
  struct sigaction sig_action;
  sig_action.sa_flags = 0;
  // Signals to block while the handler runs: just SIGINT.
  sigemptyset(&sig_action.sa_mask);
  sigaddset(&sig_action.sa_mask, SIGINT);

  // Install the signal handler for the correct mode we were started in.
  switch (command_line_args.mode) {
    case CommandLineArgs::kIndex:
      sig_action.sa_handler = SignalHandlerIndex;
      break;
    default:
      sig_action.sa_handler = SIG_DFL;
      break;
  }

  sigaction(SIGINT, &sig_action, 0);
}

void Remap() {
  GetDefaultLogger().Log("Creating remapped index...", false);
  const char* output_index_prefix = (command_line_args.output_index_prefix != NULL ? command_line_args.output_index_prefix : "index_remapped");
  IndexRemapper index_remapper(command_line_args.index_files1, output_index_prefix);
  index_remapper.GenerateMap(command_line_args.doc_mapping_file);
  Timer remapping_time;
  index_remapper.Remap();
  GetDefaultLogger().Log("Time Elapsed: " + Stringify(remapping_time.GetElapsedTime()), false);
}

void Layerify() {
  GetDefaultLogger().Log("Creating layered index...", false);
  const char* output_index_prefix = (command_line_args.output_index_prefix != NULL ? command_line_args.output_index_prefix : "index_layered");
  LayeredIndexGenerator layered_index_generator = LayeredIndexGenerator(command_line_args.index_files1, output_index_prefix);
  Timer layering_time;
  layered_index_generator.CreateLayeredIndex();
  // layered_index_generator.RecordTermUpperBoundsOfTermsForWANDANDMaxScore();
  GetDefaultLogger().Log("Time Elapsed: " + Stringify(layering_time.GetElapsedTime()), false);
}

void GenerateUrlSortedDocIdMappingFile(const char* document_urls_filename) {
  GetDefaultLogger().Log("Generating URL sorted docID mapping file...", false);
  CollectionUrlExtractor collection_url_extractor;
  collection_url_extractor.ProcessDocumentCollections(cin);
  Timer url_extraction_time;
  collection_url_extractor.ParseTrec(document_urls_filename);
  GetDefaultLogger().Log("Time Elapsed: " + Stringify(url_extraction_time.GetElapsedTime()), false);
}

void Convert() {
	  cout << "Given a posting incoming stream, build a toolkit compatible index out of it. Updated by Wei on 2014/10/12" << endl;
	  GetDefaultLogger().Log("Converting incoming posting stream...", false);

	  // Get collection indexer in order to do indexing on the specific documents.
	  CollectionIndexer& collection_indexer = GetCollectionIndexer();

	  // Start timing indexing process.
	  Timer index_time;

	  collection_indexer.ProcessIncomingPostingStream();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
}

void Convert2(){
	  cout << "Given a posting incoming stream, build a toolkit compatible index out of it. Updated by Wei on 2014/11/20" << endl;
	  GetDefaultLogger().Log("Converting incoming posting stream...", false);

	  // Get collection indexer in order to do indexing on the specific documents.
	  CollectionIndexer& collection_indexer = GetCollectionIndexer();

	  // Start timing indexing process.
	  Timer index_time;

	  // collection_indexer.ProcessIncomingPostingStreamForPostingHit();
	  collection_indexer.ProcessIncomingPostingStream();

	  GetDefaultLogger().Log("Time Elapsed: " + Stringify(index_time.GetElapsedTime()) + " seconds", false);
}

// One-time program initialization: installs the mode-appropriate SIGINT
// handler and, in debug builds (NDEBUG undefined), announces that
// assertions are active.
void Init() {
  InstallSignalHandler();

#ifndef NDEBUG
  cout << "Compiled with assertions enabled.\n" << endl;
#endif
}

int main(int argc, char** argv) {
  //put the argv info into the data structure: command_line_args ONLY
  const char* opt_string = "ibslmqcdh";
  const struct option long_opts[] = { // Index the document collection bundles.
                                      { "index", no_argument, NULL, 'i' },

                                      // The whole system will act as an distributed information broker.
                                      { "broker", no_argument, NULL, 'b' },

                                      // The whole system will act as an distributed slave infomation provider.
                                      { "slave", no_argument, NULL, 's' },

                                      // The whole system will act independetly.
                                      { "local", no_argument, NULL, 'l' },

                                      // Merge the indices generated during the indexing step.
                                      { "merge", no_argument, NULL, 'm' },

                                      // Override the default merge degree.
                                      { "merge-degree", required_argument, NULL, 0 },

                                      // Specify the files to merge and their resulting index names on stdin.
                                      { "merge-input", no_argument, NULL, 0 },

                                      // Query an index.
                                      { "query", no_argument, NULL, 'q' },

                                      // Set which query algorithm we want to use.
                                      { "query-algorithm", required_argument, NULL, 0 },

                                      // Set which query mode we want to use.
                                      { "query-mode", required_argument, NULL, 0 },

                                      // Use the following stop word list at query time.
                                      { "query-stop-list-file", required_argument, NULL, 0 },

                                      // Set which result format we want to use.
                                      { "result-format", required_argument, NULL, 0 },

                                      // Outputs inverted list data in a human readable format.
                                      { "cat", no_argument, NULL, 'c' },

                                      // Specify the inverted list (term) on which we want to run the cat procedure.
                                      { "cat-term", required_argument, NULL, 0 },

                                      // Outputs the differences between two inverted lists.
                                      { "diff", no_argument, NULL, 'd' },

                                      // Specify the inverted list (term) on which we want to run the diff procedure.
                                      { "diff-term", required_argument, NULL, 0 },

                                      // Remaps an index. The argument specifies the document mapping file to use for the remap procedure.
                                      { "remap", required_argument, NULL, 0 },

                                      // Creates a layered index.
                                      { "layerify", no_argument, NULL, 0 },

                                      // convert operation.
                                      { "convert", no_argument, NULL, 0 },

                                      // Retrieves index data for an inverted list into an in-memory array. See function 'RetrieveIndexData()'.
                                      { "retrieve-index-data", required_argument, NULL, 0 },

                                      // Loops over an inverted list (decompresses but does not do any top-k). Useful for benchmarking decompression coders.
                                      { "loop-over-index-data", required_argument, NULL, 0 },

                                      // Loads the index into main memory.
                                      { "in-memory-index", no_argument, NULL, 0 },

                                      // Memory maps the index into our address space.
                                      { "memory-map-index", no_argument, NULL, 0 },

                                      // Builds an in-memory block level index.
                                      { "block-level-index", no_argument, NULL, 0 },

                                      // Loads and uses the external index during query processing. Some query algorithms require it.
                                      // TODO: Currently not used. Algorithms that require it automatically load the external index.
                                      { "use-external-index", no_argument, NULL, 0 },

                                      // Generates a docID mapping file (docIDs are remapped by URL) that can be used as input to the remap procedure.
                                      { "generate-url-sorted-doc-mapping", required_argument, NULL, 0 },

                                      // Overrides/adds options defined in the configuration file.
                                      { "config-options", required_argument, NULL, 0 },

                                      // Runs compression tests on some randomly generated data.
                                      { "test-compression", no_argument, NULL, 0 },

                                      // Tests a specific coder.
                                      { "test-coder", required_argument, NULL, 0 },

                                      // Print help information.
                                      { "help", no_argument, NULL, 'h' },

                                      // Terminate options list.
                                      { NULL, no_argument, NULL, 0 } };

  int opt, long_index;
  // Option-parsing loop: short options select a role or processing mode
  // directly; long-only options come back as 0 and are dispatched by name
  // in the 'case 0' branch below.
  while ((opt = getopt_long(argc, argv, opt_string, long_opts, &long_index)) != -1) {
    switch (opt) {
      case 'l':
        // Run as a standalone/local query processor.
        command_line_args.role = CommandLineArgs::kLocal;
        break;

      case 'i':
        command_line_args.mode = CommandLineArgs::kIndex;
        break;

      case 'm':
        command_line_args.mode = CommandLineArgs::kMergeInitial;
        break;

      case 'q':
        command_line_args.mode = CommandLineArgs::kQuery;
        break;

      case 'c':
        command_line_args.mode = CommandLineArgs::kCat;
        break;

      case 'd':
        command_line_args.mode = CommandLineArgs::kDiff;
        break;

      case 'h':
        // Print usage and exit immediately; no other options are processed.
        Help();
        return EXIT_SUCCESS;

      case 0:
        // Process options which do not have a short arg.
        // Dispatch on the long option's name; unknown values for a known
        // option funnel into UnrecognizedOptionValue().
        if (strcmp("merge-degree", long_opts[long_index].name) == 0) {
          // NOTE(review): atoi() returns 0 on malformed input with no error
          // indication — confirm a merge degree of 0 is rejected downstream
          // (consider strtol for validation).
          command_line_args.merge_degree = atoi(optarg);
        } else if (strcmp("merge-input", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kMergeInput;
        } else if (strcmp("query-algorithm", long_opts[long_index].name) == 0) {
          // Map the textual algorithm name to the LocalQueryProcessor enum.
          if (strcmp("default", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDefault;
          else if (strcmp("daat-and", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDaatAnd;
          else if (strcmp("daat-or", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDaatOr;
          else if (strcmp("taat-or", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kTaatOr;
          else if (strcmp("dual-layered-overlapping-daat", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredOverlappingDaat;
          else if (strcmp("dual-layered-overlapping-merge-daat", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredOverlappingMergeDaat;
          else if (strcmp("multi-layered-daat-or", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kMultiLayeredDaatOr;
          else if (strcmp("multi-layered-daat-or-max-score", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kMultiLayeredDaatOrMaxScore;
          else if (strcmp("layered-taat-or-early-terminated", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kLayeredTaatOrEarlyTerminated;
          else if (strcmp("wand", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kWand;
          else if (strcmp("dual-layered-wand", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredWand;
          else if (strcmp("max-score", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kMaxScore;
          else if (strcmp("dual-layered-max-score", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDualLayeredMaxScore;
          else if (strcmp("daat-and-top-positions", optarg) == 0)
            command_line_args.query_algorithm_local = LocalQueryProcessor::kDaatAndTopPositions;
          else
            UnrecognizedOptionValue(long_opts[long_index].name, optarg);
        } else if (strcmp("query-mode", long_opts[long_index].name) == 0) {
			// Map the textual query mode (interactive/batch/special variants)
			// to the LocalQueryProcessor::QueryMode enum.
			if (strcmp("interactive", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kInteractive;
			else if (strcmp("interactive-single", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kInteractiveSingle;
			else if (strcmp("getPostingRankInListInteractive", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kGetPostingRankInListInteractive;
			else if (strcmp("bm25wsep", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kSpecialBM25wsep;
			else if (strcmp("special1", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kSpecialPurpose1;
			else if (strcmp("batch", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kBatch;
			else if (strcmp("getPostingRankInListBatch", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kGetPostingRankInListBatch;
			else if (strcmp("batch-bench", optarg) == 0)
			  command_line_args.query_mode_local = LocalQueryProcessor::kBatchBench;
			else
			  UnrecognizedOptionValue(long_opts[long_index].name, optarg);
        } else if (strcmp("query-stop-list-file", long_opts[long_index].name) == 0) {
          // Path to a stop-word list consulted at query time.
          command_line_args.query_stop_words_list_file = optarg;
        } else if (strcmp("result-format", long_opts[long_index].name) == 0) {
			// Map the textual result format to the ResultFormat enum.
			if (strcmp("trec", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kTrec;
			else if (strcmp("compare", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kCompare;
			else if (strcmp("discard", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kDiscard;
			else if (strcmp("pruning", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kPruning;
			else if (strcmp("tiering", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kTiering;
			else if (strcmp("special", optarg) == 0)
			  command_line_args.result_format_local = LocalQueryProcessor::kSpecial;
			else
			  UnrecognizedOptionValue(long_opts[long_index].name, optarg);
        } else if (strcmp("remap", long_opts[long_index].name) == 0) {
          // Remap mode carries its doc-id mapping file as the option value.
          command_line_args.mode = CommandLineArgs::kRemap;
          command_line_args.doc_mapping_file = optarg;
        } else if (strcmp("layerify", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kLayerify;
        } else if (strcmp("convert", long_opts[long_index].name) == 0) {
            command_line_args.mode = CommandLineArgs::kConvert;
        } else if (strcmp("cat-term", long_opts[long_index].name) == 0 || strcmp("diff-term", long_opts[long_index].name) == 0) {
          // Restrict cat/diff to a single term; term points into argv, so it
          // stays valid for the lifetime of the program.
          command_line_args.term_len = strlen(optarg);
          command_line_args.term = optarg;
        } else if (strcmp("retrieve-index-data", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kRetrieveIndexData;
          command_line_args.term_len = strlen(optarg);
          command_line_args.term = optarg;
        } else if (strcmp("loop-over-index-data", long_opts[long_index].name) == 0) {
          command_line_args.mode = CommandLineArgs::kLoopOverIndexData;
          command_line_args.term_len = strlen(optarg);
          command_line_args.term = optarg;
        } else if (strcmp("in-memory-index", long_opts[long_index].name) == 0) {
          // Mirror the flag into the global configuration so downstream
          // components that read config see the same setting.
          command_line_args.in_memory_index = true;
          SetConfigurationOption(string(config_properties::kMemoryResidentIndex) + string("=true"));
        } else if (strcmp("memory-map-index", long_opts[long_index].name) == 0) {
          command_line_args.memory_mapped_index = true;
          SetConfigurationOption(string(config_properties::kMemoryMappedIndex) + string("=true"));
        } else if (strcmp("block-level-index", long_opts[long_index].name) == 0) {
          SetConfigurationOption(string(config_properties::kUseBlockLevelIndex) + string("=true"));
        } else if (strcmp("use-external-index", long_opts[long_index].name) == 0) {
          command_line_args.use_external_index = true;
        } else if (strcmp("generate-url-sorted-doc-mapping", long_opts[long_index].name) == 0) {
          // One-shot utility: generate the mapping file and exit.
          GenerateUrlSortedDocIdMappingFile(optarg);
          return EXIT_SUCCESS;
        } else if (strcmp("config-options", long_opts[long_index].name) == 0) {
        	OverrideConfigurationOptions(optarg);
        } else if (strcmp("test-compression", long_opts[long_index].name) == 0) {
            // Stub: only announces the call; the actual TestCompression()
            // invocation is commented out / not wired up here.
            cout << "TestCompression() called" << endl;

          return EXIT_SUCCESS;
        } else if (strcmp("test-coder", long_opts[long_index].name) == 0) {
            // Stub: announcement only, same as test-compression above.
            cout << "TestCoder(optarg) called" << endl;

          return EXIT_SUCCESS;
        }
        // NOTE(review): a long option name matching none of the branches is
        // silently ignored — confirm getopt_long rejects unknown names first.
        break;

      default:
        // Unrecognized short option: print a pointer to help and exit.
        cout << "SeekHelp() function called" << endl;
        return EXIT_SUCCESS;
    }
  }

  // Remaining (non-option) argv entries are positional arguments: index
  // names and, for some modes, an output index prefix.
  char** input_files = argv + optind;
  int num_input_files = argc - optind;

  switch (command_line_args.role){
    case CommandLineArgs::kLocal:
      // First pass: bind positional arguments according to the mode.
      // Extra positional arguments beyond those consumed are ignored.
      switch (command_line_args.mode) {
        // These take an index name as the argument.
        case CommandLineArgs::kCat:
        case CommandLineArgs::kLoopOverIndexData:
        case CommandLineArgs::kQuery:
        case CommandLineArgs::kRetrieveIndexData:
          for (int i = 0; i < num_input_files; ++i) {
            switch (i) {
              case 0:
                command_line_args.index_files1 = ParseIndexName(input_files[i]);
                break;
            }
          }
          break;

        // These take an index name to operate on and an output index name as the arguments.
        case CommandLineArgs::kLayerify:
        case CommandLineArgs::kConvert:
        case CommandLineArgs::kRemap:
          for (int i = 0; i < num_input_files; ++i) {
            switch (i) {
              case 0:
                command_line_args.index_files1 = ParseIndexName(input_files[i]);
                break;
              case 1:
                command_line_args.output_index_prefix = input_files[i];
                break;
            }
          }
          break;

        // These take two index names as the arguments.
        case CommandLineArgs::kDiff:
          for (int i = 0; i < num_input_files; ++i) {
            switch (i) {
              case 0:
                command_line_args.index_files1 = ParseIndexName(input_files[i]);
                break;
              case 1:
                command_line_args.index_files2 = ParseIndexName(input_files[i]);
                break;
            }
          }
          break;

        // These don't take any arguments.
        case CommandLineArgs::kIndex:
        case CommandLineArgs::kMergeInitial:
        case CommandLineArgs::kMergeInput:
        case CommandLineArgs::kNoIdea:
          break;
      }

      // Global initialization and RNG seeding happen only for the local
      // role; the kNoRole path below exits via Help() without them.
      Init();
      srand(time(NULL));

      // Second pass: dispatch to the handler for the selected mode.
      // Several modes are currently stubs that only print a message.
      switch (command_line_args.mode) {
        case CommandLineArgs::kIndex:
          //begin the indexing pipeline.
          Index();
          break;
        case CommandLineArgs::kQuery:
          Query();
          break;
        case CommandLineArgs::kMergeInitial:
          MergeInitial();
          break;
        case CommandLineArgs::kMergeInput:
          // Stub: MergeInput() is not invoked.
          cout << "MergeInput() function called." << endl;
          break;
        case CommandLineArgs::kRemap:
          Remap();
          break;
        case CommandLineArgs::kLayerify:
          Layerify();
          break;
        case CommandLineArgs::kConvert:
          // Convert() superseded by Convert2().
          // Convert();
          Convert2();
          break;
        case CommandLineArgs::kCat:
          Cat();
          break;
        case CommandLineArgs::kDiff:
          Diff();
          break;
        case CommandLineArgs::kRetrieveIndexData:
          // Stub: RetrieveIndexData() is not invoked.
          cout << "RetrieveIndexData() function called." << endl;
          break;
        case CommandLineArgs::kLoopOverIndexData:
          // Stub: LoopOverIndexData() is not invoked.
          cout << "LoopOverIndexData() function called." << endl;
          break;
        default:
          // Covers kNoIdea (no mode chosen): only announces help.
          cout << "help() function called." << endl;
          break;
      }
      break;
    case CommandLineArgs::kNoRole:
      // No role selected on the command line: show usage.
      Help();
      break;
  }
  return EXIT_SUCCESS;
}
