//==============================================================================================================================================================
// Author(s): Roman Khmelichek, Wei Jiang
//
//==============================================================================================================================================================

#include "document_collection.h"


/**************************************************************************************************************************************************************
 * Document
 *
 **************************************************************************************************************************************************************/
// Lightweight, non-owning view of one document: pointers into the collection
// buffer for the document body and its URL, plus the assigned docID.
Document::Document(const char* doc_buf, int doc_len, const char* url_buf, int url_len, uint32_t doc_id) :
    doc_buf_(doc_buf),
    doc_len_(doc_len),
    url_buf_(url_buf),
    url_len_(url_len),
    doc_id_(doc_id) {
}

/**************************************************************************************************************************************************************
 * DocumentCollection
 *
 **************************************************************************************************************************************************************/
// A collection starts out unprocessed, assumed English, with a zeroed
// [initial, final] docID range; the range is filled in once the file is parsed.
DocumentCollection::DocumentCollection(const string& file_path) :
    processed_(false),
    file_path_(file_path),
    lang_(DocumentCollection::ENGLISH),
    initial_doc_id_(0),
    final_doc_id_(0) {
}

/*
 * return the document_collection_buf_len
 * */
int DocumentCollection::Fill(char** document_collection_buf, int* document_collection_buf_size) {
  int document_collection_buf_len;
  UncompressFile(file_path_.c_str(), document_collection_buf, document_collection_buf_size, &document_collection_buf_len);
  return document_collection_buf_len;
}

/**************************************************************************************************************************************************************
 * Class IndexCollection
 *
 **************************************************************************************************************************************************************/

// Queues the document collection at 'path' for indexing.
// The file is opened here purely to verify it exists and is readable; a
// collection that cannot be opened is logged and skipped (non-fatal).
// Fix: check the stream state right after open() instead of after close() —
// the old code relied on close() setting failbit on a never-opened stream.
void IndexCollection::AddDocumentCollection(const string& path) {
  ifstream ifs(path.c_str(), ifstream::in);
  if (!ifs.fail()) {
    ifs.close();
    doc_collections_.push_back(DocumentCollection(path));
  } else {
    GetErrorLogger().Log("Could not open document collection file '" + path + "'. Skipping...", false);
  }
}

// Discards every document collection queued for indexing.
void IndexCollection::EmptyDocumentCollections() {
  doc_collections_.clear();
}


// Reads collection file paths from 'is', one per line, and queues each
// non-empty path for indexing.
void IndexCollection::ProcessDocumentCollections(istream& is) {
  string line;
  while (getline(is, line)) {
    if (!line.empty()) {
      AddDocumentCollection(line);
    }
  }
}

/**************************************************************************************************************************************************************
 * CollectionIndexer
 *
 **************************************************************************************************************************************************************/
// Reads the collection buffer size from the configuration, allocates the
// decompression buffer, and wires the parser callback to the posting
// collection controller. (An earlier revision, 2014/07/09, also passed
// &GetEdgeCollectionController() to the callback.)
CollectionIndexer::CollectionIndexer() :
    document_collection_buffer_size_(atol(Configuration::GetConfiguration().GetValue(config_properties::kDocumentCollectionBufferSize).c_str())),
    document_collection_buffer_(new char[document_collection_buffer_size_]),
    parser_callback_(&GetPostingCollectionController()),
    parser_(Parser<IndexingParserCallback>::kManyDoc, GetAndVerifyDocType(), &parser_callback_),
    doc_id_(0),
    avg_doc_length_(0) {
  // atol() yields 0 for an unset or non-numeric setting; report it.
  if (document_collection_buffer_size_ == 0) {
    GetErrorLogger().Log("Check configuration setting for '" + string(config_properties::kDocumentCollectionBufferSize) + "'.", true);
  }
}

// Releases the decompression buffer acquired in the constructor.
CollectionIndexer::~CollectionIndexer() { delete[] document_collection_buffer_; }

// Looks up the configured document collection format and maps it onto the
// parser's DocType enum; reports a configuration error when the format string
// is not recognized.
Parser<IndexingParserCallback>::DocType CollectionIndexer::GetAndVerifyDocType() {
  const string format_name =
      Configuration::GetResultValue(Configuration::GetConfiguration().GetStringValue(config_properties::kDocumentCollectionFormat));

  const Parser<IndexingParserCallback>::DocType doc_type =
      Parser<IndexingParserCallback>::GetDocumentCollectionFormat(format_name.c_str());
  if (doc_type == Parser<IndexingParserCallback>::kNoSuchDocType)
    Configuration::ErroneousValue(config_properties::kDocumentCollectionFormat, format_name);

  return doc_type;
}

//Special function for wei to use, updated 2012/06/27
void CollectionIndexer::showContentOfDocumentForWei(const long beginningPosition, const long endingPosition,const char* term, int term_len)
{

  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i)
  {
    GetDefaultLogger().Log("Processing: " + i->file_path(), false);

    int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);

    // set but not used variable here is document_collection_buffer_len
    if(false){
    	cout << document_collection_buffer_len << endl;
    }

    char* document_starting_point = document_collection_buffer_ + beginningPosition;
    int document_size_in_bytes = endingPosition-beginningPosition;
    parser_.ShowContentOfSpecifcDocument(document_starting_point, document_size_in_bytes ,beginningPosition,endingPosition,term,term_len);
  }
}

//Special function for wei to use, updated 2012/06/27
void CollectionIndexer::showContentOfDocumentForWei(const long beginningPosition, const long endingPosition, vector<string> &queryID_Term_docIDList, string currentDocID, bool reloadCompressedFileFlag,ofstream &outputFileHandler)
{
	char* document_starting_point;
	int document_size_in_bytes = endingPosition-beginningPosition;
	if (reloadCompressedFileFlag){
		  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i)
		  {
		    GetDefaultLogger().Log("Retrieve and Uncompress: " + i->file_path() + " in memory", false);

		    int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);

		    // set but not used variable here is document_collection_buffer_len
		    if(false){
		    	cout << document_collection_buffer_len << endl;
		    }

		    document_starting_point = document_collection_buffer_ + beginningPosition;
		    parser_.ShowContentOfSpecifcDocument(document_starting_point, document_size_in_bytes ,beginningPosition,endingPosition,queryID_Term_docIDList,currentDocID, outputFileHandler);
		  }
	}
	else{
		//use the old buffer and will be fine.
		GetDefaultLogger().Log("Directly use the uncompressed file in memory", false);
		document_starting_point = document_collection_buffer_ + beginningPosition;
		parser_.ShowContentOfSpecifcDocument(document_starting_point, document_size_in_bytes ,beginningPosition,endingPosition,queryID_Term_docIDList,currentDocID, outputFileHandler);
	}
}


//Special function for wei to use, updated 2012/06/27
void CollectionIndexer::showContentOfDocumentForWei(long beginningPosition, long endingPosition, string currentDocID, bool reloadCompressedFileFlag)
{
	char* document_starting_point;
	int document_size_in_bytes = endingPosition-beginningPosition;
	if (reloadCompressedFileFlag){
		  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i)
		  {
		    GetDefaultLogger().Log("Retrieve and Uncompress: " + i->file_path() + " in memory", false);

		    int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);

		    // set but not used variable here is document_collection_buffer_len
		    if(false){
		    	cout << document_collection_buffer_len << endl;
		    }

		    document_starting_point = document_collection_buffer_ + beginningPosition;
		    parser_.ShowContentAndSimpleParse(document_starting_point, document_size_in_bytes);
		  }
	}
	else{
		//use the old buffer and will be fine.
		GetDefaultLogger().Log("Directly use the uncompressed file in memory", false);
		document_starting_point = document_collection_buffer_ + beginningPosition;
		parser_.ShowContentAndSimpleParse(document_starting_point, document_size_in_bytes);
	}
}

// This function is used for parsing gov2 dataset ONLY currently.
void CollectionIndexer::ParseDocumentCollectionsAndExtractingInfoForPhase2Pruning()
{
  // First, read the collection type from the .conf file to tell whether it is WARC type or TREC type.
  string collectionFormat = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kDocumentCollectionFormat));

  int total_num_docs_found = 0;

  //TODO: currently, just do a string comparison. In future, more proper way should be needed.
  if (collectionFormat == "warc"){
	  GetDefaultLogger().Log("dataset input format:CLUEWEB2009", false);
	  GetDefaultLogger().Log("This dataset format is currently NOT supported", true);
  }
  else if (collectionFormat == "trec"){
	  GetDefaultLogger().Log("dataset input format:GOV2", false);
	  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i){
	    GetDefaultLogger().Log("Processing: " + i->file_path(), false);
	    int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);
	    i->set_initial_doc_id(doc_id_);
	    int num_docs_found = parser_.ParseDocumentCollectionAndExtractingInfoForPhase2Pruning(document_collection_buffer_, document_collection_buffer_len, doc_id_, avg_doc_length_);
	    i->set_processed(true);
	    GetDefaultLogger().Log("Found: " + Stringify(num_docs_found) + " documents.", false);
	    i->set_final_doc_id(doc_id_ - 1);
	    total_num_docs_found += num_docs_found;
	  }
  }
  else{
	  GetDefaultLogger().Log("Unknown NOT supported dataset format", true);
  }

  GetDefaultLogger().Log("Total number of documents found(total_num_docs_found): " + Stringify(total_num_docs_found), false);
  GetDefaultLogger().Log("Total number of documents indexed(doc_id_): " + Stringify(doc_id_), false);

  GetPostingCollectionController().Finish();
}

// This function is used for parsing either the TREC or WARC dataset.
// TODO: Currently, it is only good for part of the WARC dataset, I have to fix it to support the TREC dataset as proper as possible.
void CollectionIndexer::ParseDocumentCollections(){
  // either WARC(clueweb09) or TREC(gov2) type
  string collectionFormat = Configuration::GetResultValue( Configuration::GetConfiguration().GetStringValue(config_properties::kDocumentCollectionFormat));

  int total_num_docs_found = 0;

  if (collectionFormat == "warc"){
	  GetDefaultLogger().Log("dataset input format:CLUEWEB2009", false);
	  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i)
	  {
		GetDefaultLogger().Log("Processing: " + i->file_path(), false);
		int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);

		i->set_initial_doc_id(doc_id_);
		int num_docs_parsed = parser_.ParseDocumentCollection(document_collection_buffer_, document_collection_buffer_len, doc_id_, avg_doc_length_);
		i->set_processed(true);
		GetDefaultLogger().Log("Parsed: " + Stringify(num_docs_parsed) + " documents.", false);
		i->set_final_doc_id(doc_id_ - 1);

		total_num_docs_found += num_docs_parsed;
	  }
  }
  else if (collectionFormat == "trec"){
	  GetDefaultLogger().Log("dataset input format:GOV2", false);
	  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i)
	  {
	    GetDefaultLogger().Log("Processing: " + i->file_path(), false);
	    int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);
	    i->set_initial_doc_id(doc_id_);
	    int num_docs_parsed = parser_.ParseDocumentCollection(document_collection_buffer_, document_collection_buffer_len, doc_id_, avg_doc_length_);
	    i->set_processed(true);
	    GetDefaultLogger().Log("Parsed: " + Stringify(num_docs_parsed) + " documents.", false);
	    i->set_final_doc_id(doc_id_ - 1);
	    total_num_docs_found += num_docs_parsed;
	  }
  }
  else{
	  GetDefaultLogger().Log("Unknown NOT supported dataset format", true);
  }

  GetDefaultLogger().Log("Total number of documents parsed(total_num_docs_found): " + Stringify(total_num_docs_found), false);
  GetDefaultLogger().Log("Total number of documents indexed(doc_id_): " + Stringify(doc_id_), false);

  GetPostingCollectionController().Finish();
}

// Delegates to the parser to build the inverted index for posting hits.
void CollectionIndexer::ProcessIncomingPostingStreamForPostingHit() {
  parser_.BuildInvertedIndexForPostingHits();
}

// Processes the incoming posting stream. Earlier revisions (2014/07-2014/12)
// dispatched to the various parser_.ParseIncomingPostingStreamForFormatN() and
// parser_.BuildInvertedIndexFor*() variants; as of 2014/12/15 the hybrid
// doc-hit + posting-hit method with global UPP is the one in use.
void CollectionIndexer::ProcessIncomingPostingStream() {
  cout << "currently, there is NOTHING here for the success of compiling." << endl;
  parser_.BuildInvertedIndexForDocHitsAndPostingHitHybridMethod_withGlobalUPPAdded_20141215();
}

void CollectionIndexer::OutputDocumentCollectionDocIdRanges(const char* filename) {
  ofstream document_collections_doc_id_ranges_stream(filename);
  if (!document_collections_doc_id_ranges_stream) {
    GetErrorLogger().Log("Could not open '" + string(filename) + "' for writing.", true);
  }

  document_collections_doc_id_ranges_stream << "'Document Collection Filename'" << "\t" << "'Initial DocID'" << "\t" << "'Final DocID'" << "\n";
  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i) {
    if (i->processed())
      document_collections_doc_id_ranges_stream << i->file_path() << "\t" << i->initial_doc_id() << "\t" << i->final_doc_id() << "\n";
  }
  document_collections_doc_id_ranges_stream.close();
}

/**************************************************************************************************************************************************************
 * CollectionUrlExtractor
 *
 **************************************************************************************************************************************************************/
// Reads the collection buffer size from the configuration, allocates the
// decompression buffer, and sets up the URL-retrieval parser.
CollectionUrlExtractor::CollectionUrlExtractor() :
    document_collection_buffer_size_(atol(Configuration::GetConfiguration().GetValue(config_properties::kDocumentCollectionBufferSize).c_str())),
    document_collection_buffer_(new char[document_collection_buffer_size_]),
    parser_(Parser<DocUrlRetrievalParserCallback>::kManyDoc, GetAndVerifyDocType(), &parser_callback_),
    doc_id_(0),
    avg_doc_length_(0) {
  // atol() yields 0 for an unset or non-numeric setting; report it.
  if (document_collection_buffer_size_ == 0) {
    GetErrorLogger().Log("Check configuration setting for '" + string(config_properties::kDocumentCollectionBufferSize) + "'.", true);
  }
}

// Releases the decompression buffer acquired in the constructor.
CollectionUrlExtractor::~CollectionUrlExtractor() { delete[] document_collection_buffer_; }

// Looks up the configured document collection format and maps it onto the
// parser's DocType enum; reports a configuration error when the format string
// is not recognized.
Parser<DocUrlRetrievalParserCallback>::DocType CollectionUrlExtractor::GetAndVerifyDocType() {
  const string format_name =
      Configuration::GetResultValue(Configuration::GetConfiguration().GetStringValue(config_properties::kDocumentCollectionFormat));

  const Parser<DocUrlRetrievalParserCallback>::DocType doc_type =
      Parser<DocUrlRetrievalParserCallback>::GetDocumentCollectionFormat(format_name.c_str());
  if (doc_type == Parser<DocUrlRetrievalParserCallback>::kNoSuchDocType)
    Configuration::ErroneousValue(config_properties::kDocumentCollectionFormat, format_name);

  return doc_type;
}

// Parses every queued collection to collect (URL, docID) pairs, sorts them
// lexicographically by URL, and writes one line per pair —
// "<mapped docID> <original docID> <URL>" — to 'document_urls_filename'.
// Fix: the open-failure log reported the hard-coded literal "document_urls"
// instead of the actual output filename.
void CollectionUrlExtractor::ParseTrec(const char* document_urls_filename) {
  int total_num_docs_found = 0;
  for (vector<DocumentCollection>::iterator i = doc_collections_.begin(); i != doc_collections_.end(); ++i) {
    GetDefaultLogger().Log("Processing: " + i->file_path(), false);

    int document_collection_buffer_len = i->Fill(&document_collection_buffer_, &document_collection_buffer_size_);

    i->set_initial_doc_id(doc_id_);
    int num_docs_parsed = parser_.ParseDocumentCollection(document_collection_buffer_, document_collection_buffer_len, doc_id_, avg_doc_length_);
    i->set_processed(true);
    GetDefaultLogger().Log("Found: " + Stringify(num_docs_parsed) + " documents.", false);
    i->set_final_doc_id(doc_id_ - 1);

    total_num_docs_found += num_docs_parsed;
  }

  GetDefaultLogger().Log("Total number of documents found: " + Stringify(total_num_docs_found), false);

  // Sort the URL and docID pairs (lexicographically by URL, docID tiebreak).
  sort(parser_callback_.document_urls().begin(), parser_callback_.document_urls().end());

  // Write the new mapped docID, original docID, and URL of the sorted URL and docID pairs to a file.
  ofstream document_urls_stream(document_urls_filename);
  if (!document_urls_stream) {
    GetErrorLogger().Log("Could not open '" + string(document_urls_filename) + "' for writing.", true);
  }

  uint32_t mapped_doc_id = 0;
  for (std::vector<std::pair<std::string, uint32_t> >::iterator i = parser_callback_.document_urls().begin(); i != parser_callback_.document_urls().end(); ++i) {
    document_urls_stream << mapped_doc_id << " " << i->second << " " << i->first << "\n";
    ++mapped_doc_id;
  }
  document_urls_stream.close();
}
