
#include "serializer.h++"
#include <boost/foreach.hpp>

using namespace WordTree;

typedef const UnserializedNode::childs_type& child_iter_type;

// remember, allocate_space invalidates real pointers (but not PointerTo)

Serializer::pointer_to_path Serializer::serialize_this_path(const std::string& string) {
	// Serialize `string` as a pascal string into paths_file and return its
	// position. Identical strings are deduplicated through known_directories
	// so a path component is only written once.
	basepath_map::iterator iter = known_directories.find(string);
	if (iter != known_directories.end())
		return iter->second;

	pointer_to_path ret = paths_file->allocate_space<MemoryMappedPascalString>(
			MemoryMappedPascalString::get_needed_size(string));
	paths_file->get(ret).set_from(string);
	// Record the freshly written string: without this insertion the lookup
	// above can never hit and every path gets serialized again.
	known_directories[string] = ret;
	return ret;
}

Serializer::pointer_to_document Serializer::serialize_document(const UnserializedDocument& document) {
	// Reserve the Document record first; the PointerTo stays valid across
	// the allocations done below.
	pointer_to_document doc_ptr = paths_file->allocate_space<Document>();

	// Both calls may allocate in paths_file and so invalidate raw
	// references — which is why the Document reference is only fetched
	// once they are both done.
	pointer_to_path dir_ptr = serialize_this_path(document.get_dirpath());
	pointer_to_path name_ptr = serialize_this_path(document.get_filename());

	Document& stored = paths_file->get(doc_ptr);
	stored.pathbase = dir_ptr;
	stored.filename = name_ptr;

	// Remember where this document landed, for get_document_position().
	known_maps[&document] = doc_ptr;
	return doc_ptr;
}

Serializer::pointer_to_document Serializer::get_document_position(const UnserializedDocument& doc) {
	// Return the serialized position recorded by serialize_document();
	// a document that was never serialized is a caller error.
	documents_map::iterator found = known_maps.find(&doc);
	if (found != known_maps.end())
		return found->second;
	throw std::runtime_error("unknown document !");
}

PointerTo<PostingData> Serializer::serialize_postings(const mtdoc& matching) {
	// Already written out once? Reuse that copy.
	if (matching.is_serialized())
		return matching.get_serialized_ptr();

	// A stack-local PostingData is used purely to compute the on-disk size.
	PostingData sizing(matching.get_nb_occurences());
	PointerTo<PostingData> serialized =
			postings_file->allocate_space<PostingData>(sizing.get_total_size());

	// Safe to hold a raw reference now: nothing below allocates.
	PostingData& target = postings_file->get(serialized);
	target.nb_occurences = sizing.nb_occurences;

	size_t index;

	PostingData::posting_table_type posting_table(target.getPostingTable());
	index = 0;
	BOOST_FOREACH(size_t value, matching.get_postings())
		*posting_table.getElement(index++) = value;

	PostingData::posting_table_type offset_table(target.getOffsetTable());
	index = 0;
	BOOST_FOREACH(size_t value, matching.get_offsets())
		*offset_table.getElement(index++) = value;

	return serialized;
}


PointerTo<DetailedDataHeader> Serializer::serialize_detailed_info(
		const WordTree::UnserializedNode& node) {

	// Serialize the node's detailed data (document pointers, tf_idf values
	// and posting pointers) into moreinfo_file. Returns a default-constructed
	// (null) PointerTo when the node matches no document.

	// `fake` is a stack-local header used only to compute the needed size;
	// nothing is written to the file through it.
	DetailedDataHeader fake;
	fake.nb_docs = node.get_document_count();
	if (!fake.nb_docs)
		return PointerTo<DetailedDataHeader>();
	PointerTo<DetailedDataHeader> detailed_ptr = 
		moreinfo_file->allocate_space<DetailedDataHeader>(
				fake.get_total_size());

	// serialize the postings first, while we don't have any pointers
	// that could get invalidated if the posting file is the same
	// as the detailed header file. store the pointers so we can write
	// them out once no further allocation can happen.
	std::vector<PointerTo<PostingData> > serialized_postings;
	typedef UnserializedNode::matching_document_type occurence;
	BOOST_FOREACH(const occurence& doc, node.get_documents()) {
		serialized_postings.push_back(serialize_postings(doc.second));
	}

	// now get the pointers. they aren't going to get invalidated now.
	DetailedDataHeader& detailed = moreinfo_file->get(detailed_ptr);
	detailed.nb_docs = fake.nb_docs;
	
	DetailedDataHeader::tfidf_table_type tfidfs(detailed.getTFIDFTable());
	DetailedDataHeader::document_table_type docs(
			detailed.getDocumentTable());
	DetailedDataHeader::posting_table_type postings(
				detailed.getPostingTable());
	// Fill the three parallel tables: entry i of each table describes the
	// same matching document. NOTE(review): this relies on get_documents()
	// iterating in the same order as the loop above so serialized_postings[i]
	// lines up — true for ordered containers; confirm for the actual type.
	// get_document_position() only does a map lookup, so no allocation (and
	// no invalidation) can happen inside this loop.
	size_t i = 0;
	BOOST_FOREACH(const occurence& doc, node.get_documents()) {
		pointer_to_document ptrdoc = get_document_position(*doc.first);
		*docs.getElement(i) = ptrdoc;
		tfidfs.getElement(i)->value = doc.second.tf_idf;
		*postings.getElement(i) = serialized_postings[i];
		i++;
	}

	return detailed_ptr;
}

PointerTo<NodeHeader> Serializer::serialize_node_header(const UnserializedNode& node) {
	// Recursively serialize `node` and its whole subtree into wordtree_file
	// and return the position of this node's header.
	NodeHeader optimized = node.get_optimized_hashtable_parameter();

	size_t serialized_size = optimized.get_size_of_structure();
	PointerTo<NodeHeader> our_serialized_ptr =
			wordtree_file->allocate_space<NodeHeader>(
					serialized_size);
	{
		NodeHeader& serialized_node = wordtree_file->get(
					our_serialized_ptr);
		serialized_node = optimized;
		// node_hashtable is otherwise unused; presumably the HashTable
		// ctor sets up the table region in place — TODO confirm.
		HashTable node_hashtable(serialized_node);
	}

	if (moreinfo_file) {
		// BUG FIX: serialize_detailed_info() calls allocate_space on
		// moreinfo_file (and postings_file). The comments in
		// serialize_detailed_info say those files may alias each other,
		// so if one aliases wordtree_file any raw NodeHeader reference
		// is invalidated here. Store the result through the PointerTo
		// (re-fetching), never through a reference taken earlier.
		PointerTo<DetailedDataHeader> detailed_ptr =
				serialize_detailed_info(node);
		wordtree_file->get(our_serialized_ptr).get_detailed_data()
				= detailed_ptr;
	}

	BOOST_FOREACH(child_iter_type pair, node.get_childs()) {
		PointerTo<NodeHeader> serialized_ptr;
		// the recursive call allocates in wordtree_file, invalidating
		// raw pointers on every iteration — hence the re-fetch below.
		serialized_ptr = serialize_node_header(*pair.second);

		HashTable hashtable(wordtree_file->get(our_serialized_ptr));
		PointerTo<NodeHeader>& ptr = *hashtable.get_entry(pair.first);
		ptr = serialized_ptr;
	}
	return our_serialized_ptr;
}
