#include "InvertedIndex.h"
#include "SharedFileName.h"
#include <fstream>
#include <iostream>
#include <string>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <set>
#include <sstream>
#include <map>
#include <vector>
#include <queue>  
#include <algorithm>
#include <time.h>
#include <math.h>
#include <unordered_set>
#include <sys/time.h> 
using namespace std;

// Accumulates per-query performance measurements (times in ms plus
// evaluation counters) and reduces them to per-query averages.
// Usage: SetTime() once per query, ComputeStruct() once at the end,
// then ToString() for reporting.
struct TimeStruct{
	double total_time;          // sum of total query times; average after ComputeStruct()
	double shortest_path_time;  // sum of shortest-path times; average after ComputeStruct()
	double text_time;           // derived: total_time - shortest_path_time (set by ComputeStruct())
	int count;                  // number of queries accumulated

	int num_docs;               // evaluated documents (sum, then average)
	int num_containers;         // evaluated containers (sum, then average)
	int num_empty_containers;   // empty containers among the evaluated ones

	TimeStruct(){
		ClearStruct();
	}

	// Add one query's measurements to the running sums.
	void SetTime(double new_total, double new_short_path, int _num_docs, int _num_containers=0, int _num_empty_containers=0){
		total_time += new_total;
		shortest_path_time += new_short_path;
		num_docs += _num_docs;
		num_containers += _num_containers;
		num_empty_containers += _num_empty_containers;
		count++;
	}

	// Reset every accumulator to zero.
	void ClearStruct(){
		total_time = 0;
		shortest_path_time = 0;
		text_time = 0;
		count = 0;
		num_docs = 0;
		num_containers = 0;
		num_empty_containers = 0;
	}

	// Turn the accumulated sums into per-query averages.
	// BUG FIX: previously divided by count unconditionally — integer
	// division by zero (UB) when no query was ever recorded.
	void ComputeStruct(){
		if(count == 0) return;
		total_time /= count;
		shortest_path_time /= count;
		text_time = total_time - shortest_path_time;
		num_docs /= count;
		num_containers /= count;
		num_empty_containers /= count;
	}

	// "text | shortest-path | docs | containers | empty containers"
	std::string ToString(){
		std::stringstream ss;
		ss << text_time << " | " << shortest_path_time << " | " << num_docs << " | " << num_containers << " | " << num_empty_containers;
		return ss.str();
	}
};
TimeStruct performance_result[6];
// Reset every slot of the global performance accumulator array.
void ClearStructArray(){
	for(auto& slot : performance_result) slot.ClearStruct();
}
// Reduce every slot's accumulated sums to per-query averages.
void ComputeStructArray(){
	for(auto& slot : performance_result) slot.ComputeStruct();
}


// Build the index for one time partition of the given project directory.
// Takes ownership of `graph`; it is released in the destructor.
InvertedIndex::InvertedIndex(string in_dir,int time_partition,Graph* graph){
	// ranking weights for the time / social / frequency score components
	alpha = 1;
	beta = 1;
	gamma = 1;

	project_dir = in_dir;
	social_graph = graph;                // ownership transferred
	time_partition_id = time_partition;

	number_docs = 0;
	latest_tweet_time = 0;
	oldest_tweet_time = 0;

	// keyword dictionary and per-keyword idf values
	LoadKeyword(keyword_map,keyword_idf,project_dir);

	// raw documents and their terms, the frequency-interval boundaries,
	// then the two auxiliary inverted-list variants used by the
	// baseline/naive query paths
	LoadForwardLists();
	LoadFreqInterval();
	LoadBaselineInvertedLists();
	LoadNormalInvertedLists();
}

InvertedIndex::~InvertedIndex(){
	// The index owns the Graph handed to the constructor and frees it here.
	// NOTE(review): callers must not delete or keep using the Graph after
	// constructing the index — confirm no second owner exists.
	delete social_graph;
}

// Randomly draw 1-3 distinct query keyword ids for experiments.
// keyword_freq_degree selects the sampling pool:
//   0 — all keywords whose inverted list holds at least 1000 postings
//   1 — the 1000 most frequent keywords
//   2 — the 20 most frequent keywords
// (keyword_idf_rank is assumed sorted ascending by list size — TODO confirm.)
vector<int> InvertedIndex::SampleKeyword(int keyword_freq_degree){
	unordered_set<int> query_keywords;
	// default pool: skip inverted lists with fewer than 1000 postings
	int begin = 0;
	for(;begin < (int)this->inverted_lists.size(); begin++){
		if(this->keyword_idf_rank[begin].first >= 1000) break;
	}

	if(keyword_freq_degree == 1){ // medium range
		begin = (int)this->inverted_lists.size() - 1000;
	}else if(keyword_freq_degree == 2){ // most frequent only
		begin = (int)this->inverted_lists.size() - 20;
	}
	// BUG FIX: for small vocabularies the subtractions above went
	// negative, producing out-of-bounds indexing below.
	if(begin < 0) begin = 0;

	int range = (int)this->inverted_lists.size() - begin;
	if(range <= 0) return vector<int>(); // nothing to sample from (rand()%0 is UB)

	// 1 keyword with prob 3/6, 2 with prob 2/6, 3 with prob 1/6
	int rand_decision = rand()%6;
	int num_keywords = 0;
	if(rand_decision < 3)  num_keywords= 1;
	else if(rand_decision < 5) num_keywords = 2;
	else num_keywords = 3;
	// BUG FIX: cannot draw more distinct keywords than the pool holds;
	// without this cap the rejection loop below never terminates.
	if(num_keywords > range) num_keywords = range;

	for(int i = 0; i < num_keywords; i++){
		int rand_keyword = -1;
		do{ // rejection-sample until an unseen keyword id comes up
			rand_keyword = keyword_idf_rank[begin + rand()%range].second;
		}while(query_keywords.find(rand_keyword) != query_keywords.end());
		query_keywords.insert(rand_keyword);
	}
	vector<int> result(query_keywords.begin(),query_keywords.end());
	return result;
}

void InvertedIndex::LoadForwardLists(){
	forward_lists.clear(); // make sure the forward list is empty

	sqlite3 *db;
	char *zErrMsg = 0;
	int rc;

	string db_name(this->project_dir);
	db_name.append(sample_database_name);

	cout << "opening db_name: " << db_name << endl;

	/* Open database */
	rc = sqlite3_open(db_name.c_str(), &db);
	if( rc ){
		fprintf(stderr, "Can't open database: %s\n", sqlite3_errmsg(db));
		exit(0);
	}else{
		fprintf(stdout, "Opened database successfully\n");
	}
	rc = sqlite3_exec(db, "PRAGMA synchronous = OFF", NULL, NULL, &zErrMsg);
	SQLerror(rc,zErrMsg);
	
	rc = sqlite3_exec(db, "BEGIN TRANSACTION", NULL, NULL, &zErrMsg);
	SQLerror(rc,zErrMsg);

	
	// load the tweet table first
	stringstream sql;
	//sql << "select * from " << doc_table_name << ";";
	sql << "select * from " << doc_table_name << " where " << time_partition_name << "=" << this->time_partition_id << " and " << doc_id_name << "< 12000000" << ";";
	sqlite3_stmt *statement;
	cout << "load tweet doc query: " << sql.str() << endl;
	int count = 0;

	if (sqlite3_prepare(db, sql.str().c_str(), -1, &statement, 0) == SQLITE_OK ) 
	{
		int res = 0;
		while ( 1 )         
		{
			res = sqlite3_step(statement);

			if ( res == SQLITE_ROW ) 
			{

				ForwardList new_list;
				new_list.doc_id = atoi((char*)sqlite3_column_text(statement,0)); 
				new_list.user_id = atoi((char*)sqlite3_column_text(statement,1));

				// get the time
				struct tm tminfo;
				char* token; 

				token = strtok((char*)sqlite3_column_text(statement,2)," -:");  tminfo.tm_year = atoi(token);
				token = strtok(NULL," -:");  tminfo.tm_mon = atoi(token);
				token = strtok(NULL," -:");  tminfo.tm_mday = atoi(token);
				token = strtok(NULL," -:");  tminfo.tm_hour = atoi(token);
				token = strtok(NULL," -:");  tminfo.tm_min = atoi(token);
				token = strtok(NULL," -:");	tminfo.tm_sec = atoi(token);

				tminfo.tm_year -= 1900;
				tminfo.tm_mon -= 1;

				new_list.doc_time = mktime (&tminfo);
				if(this->oldest_tweet_time == 0) oldest_tweet_time = new_list.doc_time;
				if(new_list.doc_time > this->latest_tweet_time) this->latest_tweet_time = new_list.doc_time;

				this->forward_lists.push_back(new_list);
				count++;
				if(count % 1000000 == 0) cout << "doc table " << count << " rows" << endl;
			}
			if ( res == SQLITE_DONE || res==SQLITE_ERROR)    
			{
				cout << "done " << endl;
				break;
			}   
		}
	}
	cout << "finish reading doc table" << endl;
	sql.str(string());
	//sql << "select * from " << doc_term_table_name << ";";
	sql << "select * from " << doc_term_table_name << " where " << time_partition_name << "=" <<  this->time_partition_id << " and " << doc_id_name << "< 12000000" << ";";
	cout << "load tweet term query: " << sql.str() << endl;
	count = 0;
	if (sqlite3_prepare(db, sql.str().c_str(), -1, &statement, 0) == SQLITE_OK ) 
	{
		int res = 0;
		while ( 1 )         
		{
			res = sqlite3_step(statement);
			if ( res == SQLITE_ROW ) 
			{
				int index = atoi((char*)sqlite3_column_text(statement,0));
				int keyword_id = atoi((char*)sqlite3_column_text(statement,1));
			
				stringstream temp_ss; // to get term frequency
				temp_ss << (char*)sqlite3_column_text(statement,2);
				double keyword_freq;
				temp_ss >> keyword_freq;

				forward_lists[index].keyword_id_tf[keyword_id] = keyword_freq;
				count++;
				if(count % 1000000 == 0) cout << "term table " << count / 1000000 << " x1M rows" << endl;
			}
			
			if ( res == SQLITE_DONE || res==SQLITE_ERROR)    
			{
				cout << "done " << endl;
				break;
			}    
		}
	}
	rc = sqlite3_exec(db, "END TRANSACTION", NULL, NULL, &zErrMsg);
	SQLerror(rc,zErrMsg);

	sqlite3_close(db);
	cout << "finish reading term table" << endl;

	// update number of documents
	number_docs += this->forward_lists.size();
	cout << "number of documents: " << number_docs << endl;
	cout << "number of terms: " << count << endl;
	char buff[20];
	strftime(buff, 20, "%Y-%m-%d %H:%M:%S", localtime(&this->latest_tweet_time));
	cout << "latest time: " << buff << endl;
	strftime(buff, 20, "%Y-%m-%d %H:%M:%S", localtime(&this->oldest_tweet_time));
	cout << "oldest time: " << buff << endl;
}

void InvertedIndex::LoadFreqInterval(){
	this->freq_interval_val.clear();
	string freq_file = this->project_dir;
	ifstream freqFile(freq_file.append(sample_freq_interval).c_str());
	if(!freqFile.is_open()){
		cout << "Cannot open freq interval file: " << freq_file << endl;
		exit(1);
	}

	double frequency;
	while(freqFile >> frequency){
		freq_interval_val.push_back(frequency);
	}

	freqFile.close();
	cout << "finish reading frequency interval information" << endl; 
}

void InvertedIndex::ClearInvertedLists(){
	for(int kid = 0; kid < this->inverted_lists.size(); kid++){
		InvertedList& list2delete = inverted_lists[kid];
		for(auto it = list2delete.inv3dindex.begin(); it != list2delete.inv3dindex.end(); it++){
			for(int i = 0; i < this->social_graph->GetNumPartition(); i++){
				delete[] (*it)[i];
			}
			delete[] (*it);
		}
		list2delete.inv3dindex.clear();
	}
	this->inverted_lists.clear();
}

void InvertedIndex::LoadInvertedLists(int time_slice_size){
	

	ClearInvertedLists(); // clear the inverted list for multiple time partition
	clock_t start = clock();
	// statistics
	vector<int> num_slices_per_keyword(keyword_map.size(),0); // statistics to keep number of slices per keyword
	vector<int> num_invlist_size_per_keyword(keyword_map.size(),0); // statistics to keep number of keyword per keyword

	vector<int> latest_slice_doc_count(keyword_map.size(),0);

	inverted_lists.resize(keyword_map.size()); // each keyword create an inverted index
	for(int i = 0; i < inverted_lists.size(); i++){
		inverted_lists[i].min_dist_to_partition_pivot.resize(this->social_graph->GetNumPartition(),100000);
		inverted_lists[i].max_dist_to_partition_pivot.resize(this->social_graph->GetNumPartition(),0);
		inverted_lists[i].list_count = 0;
		inverted_lists[i].frequency_empty.resize(this->freq_interval_val.size(),true);
	}

	int container_size = sizeof(new list<InvertedListEnt>());
	int slice_count = 0;
	int num_keywords = 0;

	for(int did = 0; did < this->forward_lists.size(); did++){ // did is the document id
	
		ForwardList& list = forward_lists[did];
		int partition_id = this->social_graph->GetUserPartition(list.user_id);
		int dist2pivot = this->social_graph->GetPointToPivotDist(partition_id,list.user_id); // get the doc to pivot distance


		// add each keyword in the inverted list that it belongs to
		for(auto it = list.keyword_id_tf.begin(); it != list.keyword_id_tf.end(); it++){
			int freq_interval_idx = GetFreqInterval(it->second);
			int keyword_id = it->first;
			InvertedList& list4insert = inverted_lists[keyword_id];
			num_keywords++;

			// create new slice
			if(list4insert.inv3dindex.empty() || latest_slice_doc_count[keyword_id] == time_slice_size){	
				CreateNewSlice(list4insert);
				num_slices_per_keyword[keyword_id]++;
				latest_slice_doc_count[keyword_id] = 0;
				slice_count++;
			}

			// insert the element
			Container3D& container2insert = (*(list4insert.inv3dindex.begin()))[partition_id][freq_interval_idx];
			container2insert.push_back(InvertedListEnt(&list,it->second));
			//(*(list4insert.inv3dindex.begin()))[partition_id][freq_interval_idx].doc_set.insert(InvertedListEnt(&list,it->second,dist2pivot));

			// update distance information for triangle rule
			int dist_to_partition_pivot = this->social_graph->GetPointToPivotDist(partition_id,list.user_id);
			if(dist_to_partition_pivot > list4insert.max_dist_to_partition_pivot[partition_id])
				list4insert.max_dist_to_partition_pivot[partition_id] = dist_to_partition_pivot;
			if(dist_to_partition_pivot < list4insert.min_dist_to_partition_pivot[partition_id])
				list4insert.min_dist_to_partition_pivot[partition_id] = dist_to_partition_pivot;

			// this is because forward list are sorted by time, newly added tweet has later time
			list4insert.time_interval[0] = list.doc_time;
			num_invlist_size_per_keyword[keyword_id]++;
			latest_slice_doc_count[keyword_id]++;
			list4insert.list_count++;
			list4insert.frequency_empty[freq_interval_idx] = false;
		}
		if(did % 1000000 ==0) cout << "processed: " << did/1000000 << "xM docs" << endl;
	}


	cout << "time to build inverted list: " << ((double)clock()-start)/CLOCKS_PER_SEC << endl;
	cout << "slice number: " << slice_count << endl;
	cout << "total memory: " << slice_count * container_size <<  endl;
	cout << "number of documents: " << this->forward_lists.size() << endl;
	cout << "number of keywords: " << num_keywords << endl;
	cout << "number of distinct keywords: " << this->keyword_idf.size() << endl;
	
	//map<int,int> container_dist;
	//for(int i = 0; i < this->inverted_lists.size(); i++){
	//	for(auto it = this->inverted_lists[i].inv3dindex.begin(); it != this->inverted_lists[i].inv3dindex.end(); it++){
	//		for(int x = 0; x < this->social_graph->GetNumPartition(); x++){
	//			for(int y = 0; y < this->freq_interval_val.size(); y++){
	//				int container_size = (*it)[x][y].size();
	//				auto dist_it = container_dist.find(container_size);
	//				if(dist_it == container_dist.end()){
	//					container_dist[container_size] = 1;
	//				}else{
	//					dist_it->second++;
	//				}
	//			}
	//		}
	//	}
	//}
	//cout << "container distribution" << endl;
	//int temp_count = 0;
	//for(auto it = container_dist.begin(); it != container_dist.end(); it++){
	//	cout << (it->first) << " " << it->second << endl;
	//	temp_count++;
	//	if(temp_count == 10) break;
	//}

	//int max_idx = -1;
	//int max_count = -1;
	//for(int i = 0; i < this->inverted_lists.size(); i++){
	//	if(this->inverted_lists[i].list_count > max_count){
	//		max_count = this->inverted_lists[i].list_count;
	//		max_idx = i;
	//	}
	//}
	//cout << "max inverted list: " << max_idx << " with size:" << max_count << endl;
	//for(int i = 0; i < this->social_graph->GetNumPartition(); i++){
	//	cout << this->inverted_lists[max_idx].min_dist_to_partition_pivot[i] << " " << this->inverted_lists[max_idx].max_dist_to_partition_pivot[i] << endl;
	//}
	//getchar();
	
	// output statistics
	/*sort(num_slices_per_keyword.begin(),num_slices_per_keyword.end());
	sort(num_invlist_size_per_keyword.begin(),num_invlist_size_per_keyword.end());*/

	/*int pre_val = -1;
	int total_count = 0;
	vector<pair<int,int> > histo;
	for(int i = 0; i < num_slices_per_keyword.size(); i++){
		if(num_slices_per_keyword[i] != pre_val){
			histo.push_back(make_pair(num_slices_per_keyword[i],1));
			pre_val = num_slices_per_keyword[i];
		}else{
			histo[histo.size()-1].second++;
		}
		total_count++;
	}
	cout << "in total: " << total_count << endl;
	cout << "slice distribution" << endl;
	for(int i = 0; i < histo.size(); i++){
		cout << histo[i].first << " " << histo[i].second << endl;
	}*/

	//vector<int> doc_histo;
	//int max = -1;
	//for(int i = 0; i < num_invlist_size_per_keyword.size(); i++){
	//	int index = num_invlist_size_per_keyword[i] / 1000;
	//	if(index > max) max = index;
	//}
	//doc_histo.resize(max+1,0);
	//for(int i = 0; i < num_invlist_size_per_keyword.size(); i++){
	//	int index = num_invlist_size_per_keyword[i] / 1000;
	//	doc_histo[index]++;
	//}
	//cout << "inverted list size distribution" << endl;
	//for(int i = 0; i < doc_histo.size(); i++){
	//	if(doc_histo[i] != 0){
	//		cout << (i*1000) << " " << doc_histo[i] << endl;
	//	}
	//}
	//getchar();
}

// to create a new time slice for inverted list
void InvertedIndex::CreateNewSlice(InvertedList& list4insert){
	int num_partition = this->social_graph->GetNumPartition();

	//cout << "num_partition: " << num_partition << endl;
	//cout << "num_freq_interval: " << freq_interval_val.size() << endl;

	Container3D** new_slice = new Container3D*[num_partition];
	for(int i = 0; i < num_partition; i++){
		new_slice[i] = new Container3D[this->freq_interval_val.size()];
	}
	list4insert.inv3dindex.push_front(new_slice);
	list4insert.time_interval.insert(list4insert.time_interval.begin(),0);
	list4insert.slice_iter_vec.insert(list4insert.slice_iter_vec.begin(),list4insert.inv3dindex.begin());
}

// Map a term frequency onto its interval index via binary search.
// freq_interval_val appears to hold interval start values sorted in
// DESCENDING order (index 0 = highest frequencies) — TODO confirm
// against the file loaded by LoadFreqInterval.
int InvertedIndex::GetFreqInterval(double frequency){
	// BUG FIX: with an empty interval table the loop below never runs and
	// mid_idx was returned uninitialized (UB). Guard + initialize.
	if(this->freq_interval_val.empty()) return 0;

	// binary search on the freqency value
	int start_idx = 0;
	int end_idx = this->freq_interval_val.size();
	int mid_idx = 0;

	while(start_idx < end_idx){
		mid_idx = (start_idx + end_idx) / 2;
		if(frequency > this->freq_interval_val[mid_idx]){
			// frequency exceeds this interval's start value: move left
			end_idx = mid_idx;
			continue;
		}

		if(mid_idx < (int)this->freq_interval_val.size() - 1){
			if(frequency < this->freq_interval_val[mid_idx+1]){
				// frequency belongs to a later (smaller-valued) interval.
				// NOTE(review): start_idx = mid_idx (not mid_idx + 1) can
				// stall when end_idx == start_idx + 1 and the frequency is
				// below every remaining boundary — presumably the input
				// data never triggers this; verify.
				start_idx = mid_idx;
				continue;
			}
		}
		break;
	}
	return mid_idx;
}

struct NodeApproximateDistRank{
	int est_dist;
	int nearest_neighbor_dist;
	int diff_val;
	list<InvertedListEnt>::iterator ent_it;

	NodeApproximateDistRank(int _est_dist, int _nearest_neighbor_dist, list<InvertedListEnt>::iterator _ent_it){
		est_dist = _est_dist;
		nearest_neighbor_dist = _nearest_neighbor_dist;
		ent_it = _ent_it;
		diff_val = est_dist - nearest_neighbor_dist;
	}

	bool operator<(const NodeApproximateDistRank& another) const
	{
		return diff_val > another.diff_val;
	}
};

// Top-K query for a single keyword issued by user_idx, answered via
// best-first exploration of the keyword's 3D container grid
// (time slice x social partition x frequency interval).
//
// Containers are visited in decreasing order of an optimistic score
// upper bound (SingleKeywordEstimateContainer); the search stops as
// soon as the best remaining bound cannot beat the current K-th score.
// Results and statistics go to stdout; timings are accumulated into the
// global performance_result (slot 0 when op_level == 2, else slot 1).
void InvertedIndex::SingleKeywordQuery(int keyword_idx, int user_idx, int topK,  int op_level){
	// normalizer for the time score: the full corpus time span
	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;

	int user_partition = this->social_graph->GetUserPartition(user_idx);
	int num_partitions = this->social_graph->GetNumPartition();
	InvertedList& searching_list = this->inverted_lists[keyword_idx];

	// sort the partition for this keyword
	vector<int> user2partition_dist = this->social_graph->GetParitionDist(user_partition);

	// query user's distance to every partition's pivot
	vector<int> dist_to_partition_pivot(num_partitions);
	for(int i = 0; i < num_partitions; i++) dist_to_partition_pivot[i] = this->social_graph->GetPointToPivotDist(i,user_idx);

	// use the triangle inequality (via this keyword's min/max doc-to-pivot
	// distances) to tighten each partition's lower-bound distance
	for(int i = 0; i < num_partitions; i++){
		int estimate_dist = dist_to_partition_pivot[i] - searching_list.max_dist_to_partition_pivot[i];
		if(estimate_dist > user2partition_dist[i]) user2partition_dist[i] = estimate_dist;
		estimate_dist = searching_list.min_dist_to_partition_pivot[i] - dist_to_partition_pivot[i];
		if(estimate_dist > user2partition_dist[i]) user2partition_dist[i] = estimate_dist;
	}

	// sort the partition by the distance; social_sort_vec[k] is
	// (lower-bound distance, partition id) for the k-th closest partition
	vector<pair<int,int> > social_sort_vec(num_partitions);
	for(int i = 0; i < num_partitions; i++) social_sort_vec[i] = make_pair(user2partition_dist[i],i);
	sort(social_sort_vec.begin(),social_sort_vec.end());

	cout << "keyword: " << keyword_idx << " with size: "<< searching_list.list_count << endl;
	cout << "number of container: " << (searching_list.slice_iter_vec.size() * num_partitions * this->freq_interval_val.size()) << endl;

	//cout << "frequency empty check: " << endl;
	//for(int i = 0; i < this->freq_interval_val.size(); i++){
	//	cout << "frequency start: " << freq_interval_val[i] << " is empty: " << searching_list.frequency_empty[i] << endl;
	//}

	timeval t1, t2;
	gettimeofday(&t1, NULL);
	double elapsedTime = 0;

	// t3/t4 time only the per-container element evaluation
	timeval t3, t4;
	double elemTime = 0;

	//unordered_set<int> evaluated_docs;	// documents that have been evaluated
	priority_queue<ResultEnt> topKcandidate;		// top K candidate
	priority_queue<ContainerEnt> container2test;	// the container to test
	// visited marks, flat-indexed as
	//   time * P * F + social_rank * F + freq   (P partitions, F intervals;
	// social_rank is the position in social_sort_vec, not the partition id)
	vector<bool> tested_container(searching_list.inv3dindex.size()*num_partitions*this->freq_interval_val.size(),false);
	double current_topK = 0;

	// push the best container on: newest slice, closest partition,
	// highest frequency interval
	Container3D* next_container = &((*(searching_list.inv3dindex.begin()))[social_sort_vec[0].second][0]);
	ContainerEnt best_container_ent(0,0,0,next_container);
	SingleKeywordEstimateContainer(best_container_ent,searching_list,max_time_diff,social_sort_vec);

	container2test.push(best_container_ent);
	tested_container[0] = true; // mark first one is tested

	// phases 0/1/2: the first `size_threshold` docs are only queued with a
	// cheap pivot-based distance estimate (phase 0); once full, the queue
	// is drained tightest-estimate-first to seed current_topK (phase 1);
	// afterwards every doc is evaluated directly (phase 2)
	priority_queue<NodeApproximateDistRank> node_dist_rank;
	int phases = 0;
	int size_threshold = 100;

	int container_count = 0;
	int evaluated_doc_count = 0; // count number of evaluated docs
	int empty_container = 0;
	while(!container2test.empty()){
		ContainerEnt top_container_ent = container2test.top();
		if(top_container_ent.best_value < current_topK) break; // we can stop here

		container_count++;
	/*	cout << "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" << endl;
		cout << "evaluated container: " << container_count << endl;
		cout << "evalutated doc: " << evaluated_docs.size() << endl;
		cout << "current_topK: " << current_topK << endl;
		cout << "current top K size: " << topKcandidate.size() << endl;
		cout << "the top_container_ent idx: " << top_container_ent.time_dim << " " << social_sort_vec[top_container_ent.social_dim].second << " " << top_container_ent.freq_dim << " "
			<< " with best score: " << top_container_ent.best_value << " " << "with container size: " << top_container_ent.container->size() << endl;
		cout << "queue size: "<< container2test.size() << endl;
		cout << "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" << endl << endl;*/
		container2test.pop();
		Container3D* top_container = top_container_ent.container;
		empty_container += (top_container->size() == 0);
		gettimeofday(&t3, NULL);

		for(auto it = top_container->begin(); it!= top_container->end(); it++){
			// evaluate every entry of the container
			ForwardList* target_doc = it->doc;
			/*cout << "query user: " << user_idx<< endl;
			cout << "document id: " << target_doc.doc_id << " with user id: " << target_doc.user_id << endl;*/
			//if(evaluated_docs.find(target_doc.doc_id) != evaluated_docs.end()) continue; // if the doc has been evaluated
			evaluated_doc_count++;

			double time_score = (double)(target_doc->doc_time - this->oldest_tweet_time) / max_time_diff;
			double freq_score = it->tf;

			// estimate first, can early terminate 
			int partition_id = social_sort_vec[top_container_ent.social_dim].second;
			//int point2user_est = dist_to_partition_pivot[partition_id] - this->social_graph->GetPointToPivotDist(partition_id,user_idx);
			//int point2user_est = it->dist2pivot - dist_to_partition_pivot[partition_id];
			//int social_est = point2user_est > social_sort_vec[top_container_ent.social_dim].first ? point2user_est : social_sort_vec[top_container_ent.social_dim].first;
			//double social_score_est = (double)1.0- (double)social_est/this->social_graph->GetMaxSocialDistance();
			//if(this->GetRankingSocre(time_score,social_score_est,freq_score) <= current_topK) break;
			
			// phase 0: defer this doc — queue it with a pivot-based distance
			// upper bound instead of computing an exact shortest path
			if(phases == 0){
				// two triangle-inequality routes; take the tighter one
				int est1 = this->social_graph->GetPointToPivotDist(partition_id,it->doc->user_id) + dist_to_partition_pivot[partition_id];
				int est2 = dist_to_partition_pivot[user_partition] + this->social_graph->GetPointToPivotDist(user_partition,target_doc->user_id);
				node_dist_rank.push(NodeApproximateDistRank(min(est1,est2),this->social_graph->GetNearestNeighborDist(target_doc->user_id),it));
				if(node_dist_rank.size() == size_threshold){
					phases = 1;
				}
				continue;
			}
			// phase 1 (entered once): drain the deferred docs,
			// tightest-estimate first, to seed topKcandidate/current_topK
			if(phases == 1){
				while(!node_dist_rank.empty()){
					list<InvertedListEnt>::iterator ent_it = node_dist_rank.top().ent_it;
					ForwardList* t_doc = ent_it->doc;
					double t_score = (double)(t_doc->doc_time - this->oldest_tweet_time) / max_time_diff;
					double f_score = ent_it->tf;
					// distance beyond which this doc cannot beat current_topK
					double m_rel = this->GetDistThreshold(current_topK,t_score,f_score);
					double s_score = this->social_graph->GetSocialRelevance(t_doc->user_id,m_rel,op_level);
					if(s_score > 0){
						ResultEnt candidate;
						candidate.doc_id = t_doc->doc_id;
						candidate.rank_value = this->GetRankingSocre(t_score,s_score,f_score);
						if(topKcandidate.size() < topK || candidate.rank_value > current_topK)
							topKcandidate.push(candidate);
						if(topKcandidate.size() > topK)
							topKcandidate.pop();
						if(topKcandidate.size() == topK)
							current_topK = topKcandidate.top().rank_value;
					}
					node_dist_rank.pop();
				}
				phases++; // fall through: the current doc is evaluated below
			}

			// phase 2: direct evaluation with the pruning threshold
			double min_relevance = this->GetDistThreshold(current_topK,time_score,freq_score);
			double social_score = this->social_graph->GetSocialRelevance(target_doc->user_id,min_relevance,op_level);

			if(social_score > 0){ // exceed the topK
				ResultEnt candidate;
				candidate.doc_id = target_doc->doc_id;
				candidate.rank_value = this->GetRankingSocre(time_score,social_score,freq_score);

				if(topKcandidate.size() < topK || candidate.rank_value > current_topK)
					topKcandidate.push(candidate);
				if(topKcandidate.size() > topK)
					topKcandidate.pop();
				if(topKcandidate.size() == topK)
					current_topK = topKcandidate.top().rank_value;
			}
			//evaluated_docs.insert(target_doc.doc_id);
		}
		gettimeofday(&t4, NULL);
		elemTime += (double)(t4.tv_sec - t3.tv_sec) * 1000;
		elemTime += (double)(t4.tv_usec - t3.tv_usec) / 1000;   // us to ms 

		/*cout << "expended: ----------------------------- " << endl;*/
		// flat index of the just-processed container (see tested_container)
		int container_idx = top_container_ent.time_dim * num_partitions * this->freq_interval_val.size() + 
			top_container_ent.social_dim * this->freq_interval_val.size() + top_container_ent.freq_dim;

		// expand the time dimension (next-older slice)
		if(top_container_ent.time_dim < searching_list.time_interval.size()-1){
			int next_container_idx = container_idx + num_partitions * this->freq_interval_val.size();
			if(!tested_container[next_container_idx]){
				int time_idx = top_container_ent.time_dim + 1;
				int social_idx = top_container_ent.social_dim;
				int freq_idx = top_container_ent.freq_dim;
				tested_container[next_container_idx] = true;

				next_container =  &((*(searching_list.slice_iter_vec[time_idx]))[social_sort_vec[social_idx].second][freq_idx]);
				ContainerEnt next_container_ent(time_idx,social_idx,freq_idx,next_container);
				SingleKeywordEstimateContainer(next_container_ent,searching_list,max_time_diff,social_sort_vec);
				if(next_container_ent.best_value > current_topK){
					container2test.push(next_container_ent);
				}
			}
		}
		// expand the social dimension (next-farther partition)
		if(top_container_ent.social_dim < num_partitions - 1){
			int next_container_idx = container_idx + this->freq_interval_val.size();
			if(!tested_container[next_container_idx]){
				int time_idx = top_container_ent.time_dim;
				int social_idx = top_container_ent.social_dim + 1;
				int freq_idx = top_container_ent.freq_dim;
				tested_container[next_container_idx] = true;

				next_container =  &((*(searching_list.slice_iter_vec[time_idx]))[social_sort_vec[social_idx].second][freq_idx]);
				ContainerEnt next_container_ent(time_idx,social_idx,freq_idx,next_container);
				SingleKeywordEstimateContainer(next_container_ent,searching_list,max_time_diff,social_sort_vec);
				if(next_container_ent.best_value > current_topK){
					container2test.push(next_container_ent);
				}
			}
			
		}
		// expand the frequency dimension (next-lower frequency interval)
		if(top_container_ent.freq_dim < this->freq_interval_val.size()-1){
			int next_container_idx = container_idx + 1;
			if(!tested_container[next_container_idx]){
				int time_idx = top_container_ent.time_dim;
				int social_idx = top_container_ent.social_dim;
				int freq_idx = top_container_ent.freq_dim + 1;
				tested_container[next_container_idx] = true;

				next_container =  &((*(searching_list.slice_iter_vec[time_idx]))[social_sort_vec[social_idx].second][freq_idx]);
				ContainerEnt next_container_ent(time_idx,social_idx,freq_idx,next_container);
				SingleKeywordEstimateContainer(next_container_ent,searching_list,max_time_diff,social_sort_vec);
				if(next_container_ent.best_value > current_topK){
					container2test.push(next_container_ent);
				}
			}
		}
	}

	gettimeofday(&t2, NULL);
	elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
    elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
    cout << elapsedTime << " ms" << endl;
	cout << "elem time: " << elemTime / 1000.0 << endl;
	cout << "shortest path: " << this->social_graph->GetElapsedTime() << endl;
	// accumulate into the global experiment stats (slot by op_level)
	int performance_idx = op_level == 2 ? 0 : 1;
	performance_result[performance_idx].SetTime(elapsedTime,this->social_graph->GetElapsedTime(),evaluated_doc_count,container_count,empty_container);

	cout << "number of evaluated doc: " << evaluated_doc_count << endl;
	cout << "number of evaluated container: " << container_count << endl;
	cout << "query user: " << user_idx << endl;
	cout << "empty container: " << empty_container << " ratio: " << (double)empty_container/container_count << endl;
	// print the K results in ascending rank order (queue top = worst kept)
	while(!topKcandidate.empty()){
		const ResultEnt& result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "doc owner: " << doc_owner << " ";
		//cout << "social rel: " << doc_owner << " with relevance: " << this->social_graph->GetSocialRelevance(doc_owner,0) << endl;
		cout << "social distance: " << (1.0-this->social_graph->GetSocialRelevance(doc_owner,0,op_level))*this->social_graph->GetMaxSocialDistance() << endl;

		topKcandidate.pop();
	}
}

// Fill container_ent.best_value with an optimistic upper bound on the
// ranking score of any document inside this container: newest possible
// time (the slice timestamp), closest possible social distance (the
// partition's lower bound) and the interval's top frequency.
void InvertedIndex::SingleKeywordEstimateContainer(
	ContainerEnt& container_ent,
	InvertedList& searching_list,
	time_t& max_time_diff, 
	vector<pair<int,int> >& social_sort_vec){

	double best_time = (double)(searching_list.time_interval[container_ent.time_dim] - this->oldest_tweet_time)/max_time_diff;
	double best_social = 1.0 - (double)social_sort_vec[container_ent.social_dim].first/this->social_graph->GetMaxSocialDistance();
	double best_freq = this->freq_interval_val[container_ent.freq_dim];

	container_ent.best_value = this->GetRankingSocre(best_time,best_social,best_freq);
}

void InvertedIndex::LoadBaselineInvertedLists(){
	this->base_line_inverted_lists.resize(this->keyword_map.size());

	for(int did = 0; did < this->forward_lists.size(); did++){
		ForwardList& list = forward_lists[did];
		for(auto it = list.keyword_id_tf.begin(); it != list.keyword_id_tf.end(); it++){
			int keyword_id = it->first;
			base_line_inverted_lists[keyword_id].push_front(InvertedListEnt(&list,it->second));
		}
		if(did % 1000000 == 0) cout << "finished " << did/1000000 << "M docs" << endl;
	}
}

void InvertedIndex::LoadNormalInvertedLists(){
	this->normal_inverted_lists.resize(this->keyword_map.size());

	for(int did = 0; did < this->forward_lists.size(); did++){
		ForwardList& list = forward_lists[did];
		for(auto it = list.keyword_id_tf.begin(); it != list.keyword_id_tf.end(); it++){
			int keyword_id = it->first;
			normal_inverted_lists[keyword_id].insert(InvertedListEnt(&list,it->second));
		}
		if(did % 1000000 == 0) cout << "finished " << did/1000000 << "M docs for normal inverted lists" << endl;
	}
}

void InvertedIndex::SingleKeywordNavive(int keyword_idx, int user_idx, int topK, int op_level){
	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;
	list<InvertedListEnt>& list4searching = this->base_line_inverted_lists[keyword_idx];

	priority_queue<ResultEnt> topKcandidate;
	cout << "navive keyword: " << keyword_idx << " with keyword size: " << list4searching.size() << endl;

	timeval t1, t2;
	gettimeofday(&t1, NULL);
	double elapsedTime = 0;

	int pos_count = 0;
	for(auto doc_it = list4searching.begin(); doc_it != list4searching.end(); doc_it++){
		ForwardList* doc_foward_list = doc_it->doc;

		double time_score = ((double)(doc_foward_list->doc_time - this->oldest_tweet_time))/max_time_diff;
		double freq_score = doc_it->tf;
		double social_score = this->social_graph->GetSocialRelevance(doc_foward_list->user_id,0,op_level);
		
		ResultEnt new_res;
		new_res.doc_id = doc_foward_list->doc_id;
		new_res.rank_value = this->GetRankingSocre(time_score,social_score,freq_score);
		topKcandidate.push(new_res);

		pos_count++;
	}

	gettimeofday(&t2, NULL);
	elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
    elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
    cout << "single keyword naive takes: " << elapsedTime << " ms" << endl;

	while(topKcandidate.size() > topK) topKcandidate.pop();
	cout << "true top K" << endl;
	while(!topKcandidate.empty()){
		ResultEnt result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "social rel: " << doc_owner << " with relevance: " << this->social_graph->GetSocialRelevance(doc_owner,0,op_level) << endl;
		topKcandidate.pop();
	}
}

// Single-keyword top-k query over the baseline (time-ordered) inverted list.
// Walks the list front-to-back (expected freshest-first; the time_test check
// below verifies monotonicity) and stops early once even a document with a
// perfect social and frequency score could not beat the current k-th best.
void InvertedIndex::SingleKeywordQueryBaseLineInvertedIndex(int keyword_idx, int user_idx, int topK, int op_level){
	
	timeval t1, t2;
	gettimeofday(&t1, NULL);
	double elapsedTime = 0;

	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;
	list<InvertedListEnt>& list4searching = this->base_line_inverted_lists[keyword_idx];
	
	priority_queue<ResultEnt> topKcandidate;
	double currentTopK = 0; // rank value of the current k-th best candidate

	int evaluated_doc_count = 0; // count number of evaluated docs
	cout << "keyword: " << keyword_idx << " with keyword size: " << list4searching.size() << endl;
	double time_test = 1; // previous time score; used only to verify the list is time-descending
	for(auto doc_it = list4searching.begin(); doc_it != list4searching.end(); doc_it++){
		ForwardList* doc_foward_list = doc_it->doc;
		double time_score = ((double)(doc_foward_list->doc_time - this->oldest_tweet_time))/max_time_diff;

		// sanity check: the baseline list must be ordered by decreasing time,
		// otherwise the early-termination test below would be unsound
		if(time_test < time_score){
			cout << "time is not decresing: " << endl;
			cout << "time_test: " << time_test << " vs. time_score:" << time_score << endl;
		}
		time_test = time_score;

		// termination check
		// (best possible score for this and all later docs: social = freq = 1)
		if(this->GetRankingSocre(time_score,1,1) < currentTopK){
			cout << "evaluated_doc_count: " << evaluated_doc_count << endl;
			break;
		}

		evaluated_doc_count++;

		double freq_score = doc_it->tf;
		// minimum social relevance this doc needs to beat the current topK;
		// presumably passed so GetSocialRelevance can abandon the shortest-path
		// search early — NOTE(review): confirm against the graph implementation
		double min_relevance = this->GetDistThreshold(currentTopK,time_score,freq_score);
		double social_score = this->social_graph->GetSocialRelevance(doc_foward_list->user_id,min_relevance,op_level);

		// social_score <= 0 means the doc cannot make the topK; skip it
		if(social_score > 0){
			double total_score = this->GetRankingSocre(time_score,social_score,freq_score);

			if(topKcandidate.size() < topK || total_score > currentTopK){
				ResultEnt new_res;
				new_res.doc_id = doc_foward_list->doc_id;
				new_res.rank_value = total_score;
				topKcandidate.push(new_res);
			}
			// keep the queue at exactly topK entries and refresh the threshold
			if(topKcandidate.size() > topK) topKcandidate.pop();
			if(topKcandidate.size() == topK) currentTopK = topKcandidate.top().rank_value;
		}
	}

	gettimeofday(&t2, NULL);
	elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
    elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
    cout << elapsedTime << " ms" << endl;
	cout << "shortest path: " << this->social_graph->GetElapsedTime() << endl;
	// performance slots 2/3 hold the baseline single-keyword results,
	// split by op_level (2 vs. other)
	int performance_idx = op_level==2 ? 2 : 3;
	performance_result[performance_idx].SetTime(elapsedTime,this->social_graph->GetElapsedTime(),evaluated_doc_count);

	cout << "number of evaluated doc: " << evaluated_doc_count << endl;
	while(!topKcandidate.empty()){
		ResultEnt result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "social rel: " << doc_owner << " with relevance: " << this->social_graph->GetSocialRelevance(doc_owner,0,op_level) << endl;
		topKcandidate.pop();
	}
}

// Multi-keyword top-k query over the 3D-container inverted index.
// For each query keyword, containers (time slice x social partition x
// frequency bin) are visited best-first via per-keyword priority queues,
// coordinated by a global queue of per-keyword best estimates. Documents
// are scored with a weighted combination of freshness, social relevance
// and tf-idf cosine similarity; traversal stops once no container can
// beat the current k-th best score.
void InvertedIndex::MultiKeywordQuery(vector<int> keyword_idxes, int user_idx, int topK, int op_level){
	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;
	int num_query_keywords = keyword_idxes.size();
	if(num_query_keywords <= 1){
		cout << "this is not a multi-keyword query" << endl;
	}
	
	int num_partitions = this->social_graph->GetNumPartition();
	int user_partition = this->social_graph->GetUserPartition(user_idx);
	int num_frequency = this->freq_interval_val.size();
	vector<int>& user2partition_dist = this->social_graph->GetParitionDist(user_partition);

	// compute idf sum constant for the query
	// (idf_sum_cons = L2 norm of the query's idf vector, used to normalize
	// the cosine-similarity text score)
	double idf_sum_cons = 0;
	vector<double> query_keyword_idf(num_query_keywords);
	for(int i = 0; i < num_query_keywords; i++){
		int keyword_idx = keyword_idxes[i];
		query_keyword_idf[i] = log((double)1.0 + (double)this->number_docs/this->inverted_lists[keyword_idx].list_count);
		idf_sum_cons += query_keyword_idf[i] * query_keyword_idf[i];
	}
	idf_sum_cons = sqrt(idf_sum_cons);

	// find a document's container
	/*vector<vector<int> > found_matched_container(num_query_keywords);
	for(int i = 0; i < num_query_keywords; i++){
		InvertedList & list4search = this->inverted_lists[keyword_idxes[i]];

		for(int x = 0; x < list4search.slice_iter_vec.size(); x++){
			for(int y = 0; y < num_partitions; y++){
				for(int z = 0; z < num_frequency; z++){
					for(auto it = (*(list4search.slice_iter_vec[x]))[y][z].doc_set.begin(); it != (*(list4search.slice_iter_vec[x]))[y][z].doc_set.end(); it++){
						if(it->doc->doc_id == 963995){
							cout << "at keyword " << keyword_idxes[i] << ":(" << x << "," << y << "," << z << ")" << endl;
							found_matched_container[i].push_back(x);
							found_matched_container[i].push_back(y);
							found_matched_container[i].push_back(z);
						}
					}
				}
			}
		}
	}*/

	// for each keyword, perform a sort on social dimension
	// (partitions ordered by a lower bound on the query user's distance to any
	// keyword-bearing document in that partition, tightened with pivot bounds)
	vector<vector<pair<int,int > > > social_order_vec(num_query_keywords); // first element is the distance, second element is the index
	for(int i = 0; i < num_query_keywords; i++){
		int keyword_idx = keyword_idxes[i];
		social_order_vec[i].resize(num_partitions);

		vector<int>& min_dist2keyword_partition = this->inverted_lists[keyword_idx].min_dist_to_partition_pivot;
		vector<int>& max_dist2keyword_partition = this->inverted_lists[keyword_idx].max_dist_to_partition_pivot;

		for(int j = 0; j < num_partitions; j++){
			social_order_vec[i][j].second = j; // index
			social_order_vec[i][j].first = user2partition_dist[j];

			// use min and max to estimate
			// (triangle-inequality style bounds via the partition pivot)
			int est_dist = this->social_graph->GetPointToPivotDist(j,user_idx) - max_dist2keyword_partition[j];
			if(est_dist > social_order_vec[i][j].first) social_order_vec[i][j].first = est_dist;

			est_dist = min_dist2keyword_partition[j] -  this->social_graph->GetPointToPivotDist(j,user_idx);
			if(est_dist > social_order_vec[i][j].first) social_order_vec[i][j].first = est_dist;
		}
		sort(social_order_vec[i].begin(),social_order_vec[i].end());
		/*cout << "keyword: " << keyword_idx << ":";
		for(int j = 0; j < num_partitions; j++) cout << "(" << social_order_vec[i][j].second << "," << social_order_vec[i][j].first <<") |";
		cout << endl;*/
	}

	// for each keyword, perform sort on frequency dimension
	// (descending by bin value; empty bins are pushed to the back with a
	// -1000000 sentinel)
	vector<vector<pair<double,int> > > freq_order_vec(num_query_keywords);
	for(int i = 0; i < num_query_keywords; i++){
		int keyword_idx = keyword_idxes[i];
		freq_order_vec[i].resize(num_frequency);

		InvertedList& searchlist = this->inverted_lists[keyword_idx];
		for(int j = 0; j < num_frequency; j++){
			freq_order_vec[i][j].second = j; // index
			if(searchlist.frequency_empty[j]) freq_order_vec[i][j].first = -1000000;
			else freq_order_vec[i][j].first = this->freq_interval_val[j];
		}
		sort(freq_order_vec[i].begin(),freq_order_vec[i].end(),pair_greater<double,int>());	
		/*cout << "keyword frequency: " << keyword_idx << ":";
		for(int j = 0; j < num_frequency; j++) cout << "(" << freq_order_vec[i][j].second << "," << freq_order_vec[i][j].first <<") |";
		cout << endl;*/
	}


	double currentTopK = 0; // rank value of the current k-th best candidate
	priority_queue<ResultEnt> topKcandidate; // the top K candidate document that matched with the query
	
	vector<vector<int> > keywords_dim_size(num_query_keywords); // keep track of the dimension for each keyword, although social and text dimension does not change 
	vector<priority_queue<ContainerEnt> > keywords_container2test(num_query_keywords); // each keyword has a priority queue
	set<pair<double,int>,pair_greater<double,int> > keyword_queue; // rank the best of different keyword, first value is the estimate value and second is the keyword idx

	vector<vector<bool> > keywords_tested_container(num_query_keywords); // keep track wether the container has been tested or not
	unordered_set<int> visited_docs; // check which documents have been evaluted
	
	// seed every keyword with its best container (0,0,0) in sorted order
	for(int i = 0; i < num_query_keywords; i++){
		int keyword_idx = keyword_idxes[i];

		// set the tested container to be all false
		keywords_tested_container[i].resize(this->inverted_lists[keyword_idx].time_interval.size() * num_partitions * num_frequency,false);

		// get the inverted list
		InvertedList& keyword_list = this->inverted_lists[keyword_idx];
		
		// add the first cube in the priority queue
		Container3D* container2add = &((*(keyword_list.slice_iter_vec[0]))[social_order_vec[i][0].second][freq_order_vec[i][0].second]);
		ContainerEnt init_container(0,0,0,container2add);
		MultiKeywordEstimateContainer(init_container,keyword_list,query_keyword_idf[i],idf_sum_cons,max_time_diff,social_order_vec[i],freq_order_vec[i],num_query_keywords);
		keywords_container2test[i].push(init_container);
		
		// insert into the global queue
		keyword_queue.insert(make_pair(init_container.best_value,i));

		// fill up dimension information
		keywords_dim_size[i].resize(3);
		keywords_dim_size[i][0] = keyword_list.time_interval.size();
		keywords_dim_size[i][1] = num_partitions;
		keywords_dim_size[i][2] = num_frequency;
	}

	// phases 0: buffer the first size_threshold docs ordered by an approximate
	// social distance; phases 1: flush that buffer with exact evaluation;
	// phases 2+: evaluate each doc immediately
	priority_queue<NodeApproximateDistRank> node_dist_rank;
	int phases = 0;
	int size_threshold = 500*num_query_keywords;
	int num_evaluated_containers = 0;
	int num_empty_containers = 0;

	timeval t1, t2;
	gettimeofday(&t1, NULL);
	double elapsedTime = 0;

	// main best-first loop: repeatedly take the keyword whose next container
	// has the highest estimated value
	while(!keyword_queue.empty()){
		pair<double,int> best_keyword_to_extend = *(keyword_queue.begin());
		keyword_queue.erase(keyword_queue.begin()); // erase the best one, need to insert the updated one

		if(best_keyword_to_extend.first < 0){
			cout << "the inverted lists from all keywords are exhausted" << endl;
			break;
		}

		// the per-keyword estimate is scaled by the number of keywords to get
		// an upper bound on a full-query score
		if(best_keyword_to_extend.first * num_query_keywords < currentTopK){ // found the topK
			cout << "find the topK" << endl;
			break; 
		}
		int keyword_i = best_keyword_to_extend.second;
		num_evaluated_containers++;


		// to evaluate the element in this container
		ContainerEnt best_container_ent = keywords_container2test[keyword_i].top();
		keywords_container2test[keyword_i].pop();

		Container3D* best_container3d = best_container_ent.container;
		num_empty_containers += (best_container3d->size() == 0);

		for(auto it = best_container3d->begin(); it != best_container3d->end(); it++){
			if(visited_docs.find(it->doc->doc_id) != visited_docs.end()) continue; // this document has been evaluated
			visited_docs.insert(it->doc->doc_id);

			double time_score = (double)(it->doc->doc_time-this->oldest_tweet_time)/max_time_diff;

			
			// estimate first 
			pair<int,int>& part_social_dist = social_order_vec[keyword_i][best_container_ent.social_dim];
			int partition_id = part_social_dist.second;
			//int est_dist = it->dist2pivot - this->social_graph->GetPointToPivotDist(partition_id,user_idx);
			//est_dist = est_dist > part_social_dist.first ? est_dist : part_social_dist.first;
			//double est_social = (double)1.0- (double)est_dist/this->social_graph->GetMaxSocialDistance();
			//double est_freq = it->tf * query_keyword_idf[keyword_i];
			//for(int k = 0; k < num_query_keywords; k++){
			//	if(k != keyword_i) est_freq += query_keyword_idf[k]; // estimate the tf as 1
			//}
			//est_freq /= idf_sum_cons;
			//if(est_freq > 1.0) est_freq = 1.0;
			//double estimate = this->GetRankingSocre(time_score,est_social,est_freq);
			//if(estimate < currentTopK) break;

			// distance computation improvement
			// phase 0: defer exact social-distance work; rank docs by the
			// cheaper pivot-based approximate distance (min of two routes)
			if(phases == 0){
				int est1 = this->social_graph->GetPointToPivotDist(partition_id,it->doc->user_id) + this->social_graph->GetPointToPivotDist(partition_id,user_idx);
				int est2 = this->social_graph->GetPointToPivotDist(user_partition,user_idx) + this->social_graph->GetPointToPivotDist(user_partition,it->doc->user_id);
				node_dist_rank.push(NodeApproximateDistRank(min(est1,est2),this->social_graph->GetNearestNeighborDist(it->doc->user_id),it));
				if(node_dist_rank.size() == size_threshold){
					phases = 1;
				}
				continue;
			}
			// phase 1 (entered once): exactly evaluate everything buffered in
			// phase 0, nearest-approximate-distance first, updating the topK
			if(phases == 1){
				while(!node_dist_rank.empty()){
					list<InvertedListEnt>::iterator ent_it = node_dist_rank.top().ent_it;
					ForwardList* t_doc = ent_it->doc;
					double t_score = (double)(t_doc->doc_time - this->oldest_tweet_time) / max_time_diff;
					double f_score = 0;
					// cosine similarity between doc tf and query idf vector
					for(int k = 0; k < num_query_keywords; k++){
						if(ent_it->doc->keyword_id_tf.find(keyword_idxes[k]) != ent_it->doc->keyword_id_tf.end())
							f_score += ent_it->doc->keyword_id_tf[keyword_idxes[k]] * query_keyword_idf[k];
					}


					f_score /= idf_sum_cons;

					

					double m_rel = this->GetDistThreshold(currentTopK,t_score,f_score);
					double s_score = this->social_graph->GetSocialRelevance(t_doc->user_id,m_rel,op_level);

					//if(ent_it->doc->doc_id == 979811){
					//	cout << "phases: " << phases << " author: " << ent_it->doc->user_id << endl;
					//	cout << "t_score: " << t_score << " f_score: " << f_score << " currentTopK: " << currentTopK << endl;
					//	cout << "minimum relevance: " << m_rel << endl;
					//	cout << "2hop score: " << s_score << " with dist: " << ((double)1.0-s_score)*this->social_graph->GetMaxSocialDistance() << endl;
					//	cout << "correct social score: " << ((double)1.0-this->social_graph->GetSocialRelevance(t_doc->user_id,0,false))*this->social_graph->GetMaxSocialDistance() << endl;
					//	getchar();
					//}

					if(s_score > 0){
						ResultEnt candidate;
						candidate.doc_id = t_doc->doc_id;
						candidate.rank_value = this->GetRankingSocre(t_score,s_score,f_score);
						if(topKcandidate.size() < topK || candidate.rank_value > currentTopK)
							topKcandidate.push(candidate);
						if(topKcandidate.size() > topK)
							topKcandidate.pop();
						if(topKcandidate.size() == topK)
							currentTopK = topKcandidate.top().rank_value;
					}
					node_dist_rank.pop();
				}
				phases++;
			}


			// evaluate the true value
			// (from phase 2 on, each new doc is scored immediately)
			double freq_score = 0;
			for(int k = 0; k < num_query_keywords; k++){
				if(it->doc->keyword_id_tf.find(keyword_idxes[k]) != it->doc->keyword_id_tf.end())
					freq_score += it->doc->keyword_id_tf[keyword_idxes[k]] * query_keyword_idf[k];
			}
			freq_score /= idf_sum_cons;
			
			double min_relevance = this->GetDistThreshold(currentTopK,time_score,freq_score);
			double social_score = this->social_graph->GetSocialRelevance(it->doc->user_id,min_relevance,op_level);

			if(social_score > 0){ // exceed the topK
				ResultEnt candidate;
				candidate.doc_id = it->doc->doc_id;
				candidate.rank_value = this->GetRankingSocre(time_score,social_score,freq_score);

				if(topKcandidate.size() < topK || candidate.rank_value > currentTopK)
					topKcandidate.push(candidate);
				if(topKcandidate.size() > topK)
					topKcandidate.pop();
				if(topKcandidate.size() == topK)
					currentTopK = topKcandidate.top().rank_value;
			}
		}

		// extend the 3 directions
		// (push each unvisited neighbor container of the one just processed)
		InvertedList& keyword_list = this->inverted_lists[keyword_idxes[keyword_i]];
		for(int j = 0; j < 3; j++){
			int dim_val[] = {best_container_ent.time_dim,best_container_ent.social_dim,best_container_ent.freq_dim};
			dim_val[j] += 1;
			if(dim_val[j] >= keywords_dim_size[keyword_i][j]) continue;
			// flat index into the (time x social x freq) tested-flags array
			int container_idx = dim_val[0] * num_partitions * num_frequency + dim_val[1] * num_frequency + dim_val[2];
			if(keywords_tested_container[keyword_i][container_idx]) continue;
			

			Container3D* container2add = &((*(keyword_list.slice_iter_vec[dim_val[0]]))[social_order_vec[keyword_i][dim_val[1]].second][freq_order_vec[keyword_i][dim_val[2]].second]);

			// for efficient cube traverse only text dimension
			// (skip over empty frequency bins to the next non-empty one)
			if(j == 2){
				auto it = keyword_list.slice_iter_vec[dim_val[0]];
				int social_dim = social_order_vec[keyword_i][dim_val[1]].second;
				Container3D* freq_container_array = (*it)[social_dim];

				bool should_compute = true; // checks if this direction has been evaluated
				while(container2add->empty()){
					dim_val[j]++;
					if(dim_val[j] >= keywords_dim_size[keyword_i][j]){
						should_compute = false;
						break;
					}
					container_idx++;
					if(keywords_tested_container[keyword_i][container_idx]){
						should_compute = false;
						break;
					}
					container2add = &(freq_container_array[freq_order_vec[keyword_i][dim_val[2]].second]);
				}
				if(!should_compute) continue;
			}

			ContainerEnt extend_container(dim_val[0],dim_val[1],dim_val[2],container2add);
			MultiKeywordEstimateContainer(extend_container,keyword_list,query_keyword_idf[keyword_i],idf_sum_cons,max_time_diff,social_order_vec[keyword_i],freq_order_vec[keyword_i],num_query_keywords);
			keywords_tested_container[keyword_i][container_idx] = true;

			// only enqueue containers that could still beat the current topK
			double estimate_container_score = extend_container.best_value * num_query_keywords;
			if(estimate_container_score > currentTopK) keywords_container2test[keyword_i].push(extend_container);
		}

		// update the best value for this keyword
		if(!keywords_container2test[keyword_i].empty()){
			keyword_queue.insert(make_pair(keywords_container2test[keyword_i].top().best_value,keyword_i));
		}

	} // main loop

	gettimeofday(&t2, NULL);
	elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
    elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
    cout << "total query time: " << elapsedTime << " ms" << endl;
	cout << "shortest path: " << this->social_graph->GetElapsedTime() << endl;
	cout << "num of evaluated container: " << num_evaluated_containers << endl;
	cout << "num of empty container: " << num_empty_containers << endl;
	// performance slots 0/1 hold the container-index multi-keyword results,
	// split by op_level (2 vs. other)
	int performance_idx = op_level==2 ? 0 : 1;
	performance_result[performance_idx].SetTime(elapsedTime,this->social_graph->GetElapsedTime(),visited_docs.size(),num_evaluated_containers,num_empty_containers);

	cout << "number of evaluated doc: " << visited_docs.size() << endl;
	while(!topKcandidate.empty()){
		const ResultEnt& result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "doc contain keywords: ";
		for(int i = 0 ; i < num_query_keywords; i++){
			int keyword = keyword_idxes[i];
			if(forward_lists[result.doc_id].keyword_id_tf.find(keyword) != forward_lists[result.doc_id].keyword_id_tf.end()){
				cout << keyword << " ";
			}
		}
		cout << "social distance: " << (1.0-this->social_graph->GetSocialRelevance(doc_owner,0,op_level))*this->social_graph->GetMaxSocialDistance() << endl;

		topKcandidate.pop();
	}

}

void InvertedIndex::MultiKeywordEstimateContainer(ContainerEnt& container_ent, InvertedList& searching_list, double keyword_idf, double idf_sum_cons,
		time_t& max_time_diff, vector<pair<int,int> >& social_order_vec, vector<pair<double,int > >& freq_order_vec, int num_query_keywords){

	// Upper-bound score estimate for a container in a multi-keyword query:
	// the time and social parts are averaged over the number of query
	// keywords, while the text part is the idf-weighted frequency bound
	// normalized by the query's idf norm.
	double t_score = (double)(searching_list.time_interval[container_ent.time_dim] - this->oldest_tweet_time) / max_time_diff;
	double s_score = 1.0 - ((double)social_order_vec[container_ent.social_dim].first) / this->social_graph->GetMaxSocialDistance();
	double f_score = (keyword_idf * freq_order_vec[container_ent.freq_dim].first) / idf_sum_cons;
	container_ent.best_value = (alpha*t_score + beta*s_score) / num_query_keywords + gamma*f_score;
}

void InvertedIndex::MultiKeywordNavive(vector<int> keyword_idxes, int user_idx,int topK, int op_level){
	// Naive multi-keyword query: walk every posting of every query keyword,
	// score each distinct document exactly once (full social relevance, no
	// pruning), and maintain a topK candidate queue.
	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;
	int num_query_keywords = keyword_idxes.size();

	// queried for parity with the indexed version; not used by the naive scan
	int num_partitions = this->social_graph->GetNumPartition();
	int user_partition = this->social_graph->GetUserPartition(user_idx);
	int num_frequency = this->freq_interval_val.size();

	// idf weight per query keyword plus the normalizing constant (L2 norm)
	double idf_sum_cons = 0;
	vector<double> query_keyword_idf(num_query_keywords);
	for(int k = 0; k < num_query_keywords; k++){
		query_keyword_idf[k] = log((double)1.0 + (double)this->number_docs/this->inverted_lists[keyword_idxes[k]].list_count);
		idf_sum_cons += query_keyword_idf[k] * query_keyword_idf[k];
	}
	idf_sum_cons = sqrt(idf_sum_cons);
	

	priority_queue<ResultEnt> topKcandidate;
	unordered_set<int> visited_docs;
	double currentTopK = 0;

	for(int k = 0; k < num_query_keywords; k++){
		list<InvertedListEnt>& postings = this->base_line_inverted_lists[keyword_idxes[k]];

		for(auto& posting : postings){
			ForwardList* doc = posting.doc;
			if(!visited_docs.insert(doc->doc_id).second) continue; // already scored

			double time_score = (double)(doc->doc_time - this->oldest_tweet_time)/max_time_diff;

			// compute cosine similarity between the doc's tf and the query idf
			double freq_score = 0;
			for(int q = 0; q < num_query_keywords; q++){
				auto tf_it = doc->keyword_id_tf.find(keyword_idxes[q]);
				if(tf_it != doc->keyword_id_tf.end())
					freq_score += tf_it->second * query_keyword_idf[q]; // tf * idf
			}
			freq_score /= idf_sum_cons;

			double social_score = this->social_graph->GetSocialRelevance(doc->user_id,0,op_level);

			ResultEnt candidate;
			candidate.doc_id = doc->doc_id;
			candidate.rank_value = this->GetRankingSocre(time_score,social_score,freq_score);

			// keep the queue at topK and refresh the threshold
			if(topKcandidate.size() < topK || candidate.rank_value > currentTopK)
				topKcandidate.push(candidate);
			if(topKcandidate.size() > topK)
				topKcandidate.pop();
			if(topKcandidate.size() == topK)
				currentTopK = topKcandidate.top().rank_value;
		}
	}
	cout << "shortest path: " << this->social_graph->GetElapsedTime() << endl;
	cout << "topK so far: " << currentTopK << endl;
	cout << "number of evaluated doc: " << visited_docs.size() << endl;
	cout << "query user: " << user_idx << endl;
	while(!topKcandidate.empty()){
		const ResultEnt& result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "doc contain keywords: ";
		for(int q = 0 ; q < num_query_keywords; q++){
			int keyword = keyword_idxes[q];
			if(forward_lists[result.doc_id].keyword_id_tf.count(keyword)){
				cout << keyword << " ";
			}
		}
		cout << "social distance: " << (1.0-this->social_graph->GetSocialRelevance(doc_owner,0,op_level))*this->social_graph->GetMaxSocialDistance() << endl;

		topKcandidate.pop();
	}
}

// Baseline multi-keyword top-k query over the time-ordered inverted lists.
// Repeatedly picks the freshest unseen posting across all query keywords,
// scores it, and terminates once even a document with perfect social and
// text scores could not beat the current k-th best.
void InvertedIndex::MultiKeywordNaviveBaseLineInvertedIndex(vector<int> keyword_idxes, int user_idx,int topK, int op_level){
	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;
	int num_query_keywords = keyword_idxes.size();

	// compute idf sum constant for the query (L2 norm of the query idf vector)
	double idf_sum_cons = 0;
	vector<double> query_keyword_idf(num_query_keywords);
	for(int i = 0; i < num_query_keywords; i++){
		int keyword_idx = keyword_idxes[i];
		query_keyword_idf[i] = log((double)1.0 + (double)this->number_docs/this->inverted_lists[keyword_idx].list_count);
		idf_sum_cons += query_keyword_idf[i] * query_keyword_idf[i];
	}
	idf_sum_cons = sqrt(idf_sum_cons);

	timeval t1, t2;
	gettimeofday(&t1, NULL);
	double elapsedTime = 0;

	double currentTopK = 0; // rank value of the current k-th best candidate
	priority_queue<ResultEnt> topKcandidate; // the top K candidate document that matched with the query
	vector<list<InvertedListEnt>::iterator > inverted_lists_iter;
	// insert the iterator for each keywords (one cursor per list)
	for(int i = 0; i < num_query_keywords; i++){
		inverted_lists_iter.push_back(this->base_line_inverted_lists[keyword_idxes[i]].begin());
	}
	unordered_set<int> evaluated_docs;

	cout << "entered loop: " << endl;
	while(true){
		// pick the non-exhausted cursor pointing at the freshest posting
		time_t most_fresh = -1;
		int best_keyword_idx = -1;
		for(int i = 0; i < num_query_keywords; i++){
			// BUG FIX: check the cursor against end() BEFORE dereferencing it.
			// The original evaluated the dereference first in the &&, which is
			// undefined behavior once a list is exhausted (or initially empty).
			if(inverted_lists_iter[i] == this->base_line_inverted_lists[keyword_idxes[i]].end()) continue;
			if(inverted_lists_iter[i]->doc->doc_time > most_fresh){
				most_fresh = inverted_lists_iter[i]->doc->doc_time;
				best_keyword_idx = i;
			}
		}
		
		if(best_keyword_idx == -1){
			cout << "exhaust all keyword inverted index" << endl;
			break;
		}

		auto it = inverted_lists_iter[best_keyword_idx];
		inverted_lists_iter[best_keyword_idx]++;

		if(evaluated_docs.find(it->doc->doc_id) != evaluated_docs.end()) continue;
		evaluated_docs.insert(it->doc->doc_id);

		// make estimate of the current doc: with social = freq = 1 this is an
		// upper bound for all remaining (older) postings, so we can stop
		double time_score = (double)(most_fresh - this->oldest_tweet_time)/max_time_diff;
		if(this->GetRankingSocre(time_score,1,1) < currentTopK) break;


		// cosine similarity between the doc's tf and the query idf vector
		double freq_score = 0;
		for(int k = 0; k < num_query_keywords; k++){
			if(it->doc->keyword_id_tf.find(keyword_idxes[k]) != it->doc->keyword_id_tf.end())
				freq_score += it->doc->keyword_id_tf[keyword_idxes[k]] * query_keyword_idf[k];
		}
		freq_score /= idf_sum_cons;

		// minimum social relevance needed to beat currentTopK — presumably
		// lets GetSocialRelevance abandon the search early
		double min_relevance = this->GetDistThreshold(currentTopK,time_score,freq_score);
		double social_score = this->social_graph->GetSocialRelevance(it->doc->user_id,min_relevance,op_level);

		if(social_score > 0){ // exceed the topK
			ResultEnt candidate;
			candidate.doc_id = it->doc->doc_id;
			candidate.rank_value = this->GetRankingSocre(time_score,social_score,freq_score);

			if(topKcandidate.size() < topK || candidate.rank_value > currentTopK)
				topKcandidate.push(candidate);
			if(topKcandidate.size() > topK)
				topKcandidate.pop();
			if(topKcandidate.size() == topK)
				currentTopK = topKcandidate.top().rank_value;
		}
		
	}

	gettimeofday(&t2, NULL);
	elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
    elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
    cout << "total query time: " << elapsedTime << " ms" << endl;
	cout << "shortest path query time: " << this->social_graph->GetElapsedTime() << " ms" << endl;
	// performance slots 2/3 hold the baseline results, split by op_level
	int performance_idx = op_level == 2 ? 2 : 3;
	performance_result[performance_idx].SetTime(elapsedTime,this->social_graph->GetElapsedTime(),evaluated_docs.size());

	cout << "number of evaluated doc: " << evaluated_docs.size() << endl;
	while(!topKcandidate.empty()){
		const ResultEnt& result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "doc contain keywords: ";
		for(int i = 0 ; i < num_query_keywords; i++){
			int keyword = keyword_idxes[i];
			if(forward_lists[result.doc_id].keyword_id_tf.find(keyword) != forward_lists[result.doc_id].keyword_id_tf.end()){
				cout << keyword << " ";
			}
		}
		cout << "social distance: " << (1.0-this->social_graph->GetSocialRelevance(doc_owner,0,op_level))*this->social_graph->GetMaxSocialDistance() << endl;

		topKcandidate.pop();
	}
}

// Baseline multi-keyword top-k query over the frequency-ordered ("normal")
// inverted lists. Repeatedly picks the unseen posting with the highest tf
// across all query keywords, scores it, and terminates once the tf-derived
// upper bound for unseen documents cannot beat the current k-th best.
void InvertedIndex::MultiKeywordNormalBaseLineInvertedIndex(vector<int> keyword_idxes, int user_idx, int topK, int op_level){
	time_t max_time_diff = this->latest_tweet_time - this->oldest_tweet_time;
	int num_query_keywords = keyword_idxes.size();

	timeval t1, t2;
	gettimeofday(&t1, NULL);
	double elapsedTime = 0;

	// compute idf sum constant for the query (L2 norm of the query idf vector)
	double idf_sum_cons = 0;
	vector<double> query_keyword_idf(num_query_keywords);
	for(int i = 0; i < num_query_keywords; i++){
		int keyword_idx = keyword_idxes[i];
		query_keyword_idf[i] = log((double)1.0 + (double)this->number_docs/this->inverted_lists[keyword_idx].list_count);
		idf_sum_cons += query_keyword_idf[i] * query_keyword_idf[i];
	}
	idf_sum_cons = sqrt(idf_sum_cons);

	double currentTopK = 0; // rank value of the current k-th best candidate
	priority_queue<ResultEnt> topKcandidate; // the top K candidate document that matched with the query
	vector<set<InvertedListEnt,InvertedListEntSortedFreq>::iterator > inverted_lists_iter;

	// insert the iterator for each keywords (one cursor per list)
	for(int i = 0; i < num_query_keywords; i++){
		inverted_lists_iter.push_back(this->normal_inverted_lists[keyword_idxes[i]].begin());
	}
	unordered_set<int> evaluated_docs;

	while(true){
		// pick the non-exhausted cursor pointing at the highest-tf posting
		double best_tf = -1;
		int best_keyword_idx = -1;

		for(int i = 0; i < num_query_keywords; i++){
			// BUG FIX: check the cursor against end() BEFORE dereferencing it.
			// The original evaluated the dereference first in the &&, which is
			// undefined behavior once a list is exhausted (or initially empty).
			if(inverted_lists_iter[i] == this->normal_inverted_lists[keyword_idxes[i]].end()) continue;
			if(inverted_lists_iter[i]->tf > best_tf){
				best_tf = inverted_lists_iter[i]->tf;
				best_keyword_idx = i;
			}
		}

		if(best_keyword_idx == -1){
			cout << "exhaust all keyword inverted index" << endl;
			break;
		}

		auto it = inverted_lists_iter[best_keyword_idx];
		inverted_lists_iter[best_keyword_idx]++;

		if(evaluated_docs.find(it->doc->doc_id) != evaluated_docs.end()) continue;
		evaluated_docs.insert(it->doc->doc_id);

		// derived a upper bound for unseen documents from the current cursor
		// positions (exhausted lists contribute nothing)
		// BUG FIX: skip exhausted cursors here too — the original dereferenced
		// them unconditionally, which is undefined behavior.
		double freq_estimate = 0;
		for(int i = 0; i < num_query_keywords; i++){
			if(inverted_lists_iter[i] == this->normal_inverted_lists[keyword_idxes[i]].end()) continue;
			freq_estimate += inverted_lists_iter[i]->tf * query_keyword_idf[i];
		}
		if(freq_estimate > 1) freq_estimate = 1;

		// terminate when even perfect time/social scores cannot beat topK
		if(this->GetRankingSocre(1,1,freq_estimate) < currentTopK) break;


		double time_score = (double)(it->doc->doc_time - this->oldest_tweet_time)/ max_time_diff;

		// cosine similarity between the doc's tf and the query idf vector
		double freq_score = 0;
		for(int k = 0; k < num_query_keywords; k++){
			if(it->doc->keyword_id_tf.find(keyword_idxes[k]) != it->doc->keyword_id_tf.end())
				freq_score += it->doc->keyword_id_tf[keyword_idxes[k]] * query_keyword_idf[k];
		}
		freq_score /= idf_sum_cons;

		// minimum social relevance needed to beat currentTopK — presumably
		// lets GetSocialRelevance abandon the search early
		double min_relevance = this->GetDistThreshold(currentTopK,time_score,freq_score);
		double social_score = this->social_graph->GetSocialRelevance(it->doc->user_id,min_relevance,op_level);

		if(social_score > 0){ // exceed the topK
			ResultEnt candidate;
			candidate.doc_id = it->doc->doc_id;
			candidate.rank_value = this->GetRankingSocre(time_score,social_score,freq_score);

			if(topKcandidate.size() < topK || candidate.rank_value > currentTopK)
				topKcandidate.push(candidate);
			if(topKcandidate.size() > topK)
				topKcandidate.pop();
			if(topKcandidate.size() == topK)
				currentTopK = topKcandidate.top().rank_value;
		}
		
	}

	gettimeofday(&t2, NULL);
	elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
    elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
    cout << "total query time: " << elapsedTime << " ms" << endl;
	cout << "shortest path query time: " << this->social_graph->GetElapsedTime() << " ms" << endl;
	// performance slots 4/5 hold the frequency-ordered baseline results,
	// split by op_level
	int performance_idx = op_level == 2 ? 4 : 5;
	performance_result[performance_idx].SetTime(elapsedTime,this->social_graph->GetElapsedTime(),evaluated_docs.size());
	cout << "number of evaluated docs: " << evaluated_docs.size() << endl;

	while(!topKcandidate.empty()){
		const ResultEnt& result = topKcandidate.top();
		cout << "doc: " << result.doc_id << " with value: " << result.rank_value << " ";
		int doc_owner = forward_lists[result.doc_id].user_id;
		cout << "doc contain keywords: ";
		for(int i = 0 ; i < num_query_keywords; i++){
			int keyword = keyword_idxes[i];
			if(forward_lists[result.doc_id].keyword_id_tf.find(keyword) != forward_lists[result.doc_id].keyword_id_tf.end()){
				cout << keyword << " ";
			}
		}
		cout << "social distance: " << (1.0-this->social_graph->GetSocialRelevance(doc_owner,0,op_level))*this->social_graph->GetMaxSocialDistance() << endl;

		topKcandidate.pop();
	}
}

// Triple of ranking weights (alpha, beta, gamma) used when sweeping
// parameter combinations in TestSuit.
struct ParamStruct{
	double alpha;
	double beta;
	double gamma;

	ParamStruct(double a, double b, double c){
		alpha = a; beta = b; gamma = c;
	}

	// Lexicographic ordering on (alpha, beta, gamma).
	// BUG FIX: the original fell off the end without returning when all
	// three fields compared equal, which is undefined behavior for a
	// value-returning function; a strict weak ordering must return false
	// for equivalent elements.
	bool operator<(const ParamStruct& another) const
	{
		if(alpha < another.alpha) return true;
		else if(alpha > another.alpha) return false;

		if(beta < another.beta) return true;
		else if(beta > another.beta) return false;

		if(gamma < another.gamma) return true;
		else if(gamma > another.gamma) return false;

		return false; // all fields equal: not less-than
	}

	
};

// Benchmark driver for one time-slice size: loads the inverted lists,
// samples a fixed set of query users and keyword pairs, then times six
// multi-keyword query variants (naive baseline, optimized query, normal
// baseline — each at op_level 2 and 1). Averaged timings collected in the
// global performance_result array are appended to "Performance.txt".
void InvertedIndex::TestSuit(int time_slice_size)
{
	LoadInvertedLists(time_slice_size);
	cout << "finish loading" << endl;
	// Rebuild the member keyword ranking: (posting-list size, keyword idx),
	// sorted ascending by list size.
	keyword_idf_rank.clear();
	for(int i = 0; i < this->inverted_lists.size(); i++) this->keyword_idf_rank.push_back(make_pair(this->inverted_lists[i].list_count,i));
	sort(keyword_idf_rank.begin(),keyword_idf_rank.end());

	int number_of_experiments = 20;

	// Candidate (alpha, beta, gamma) weight combinations. Currently only
	// populated, never iterated — the sweep loop below is commented out.
	vector<ParamStruct> possible_param;
	/*possible_param.push_back(ParamStruct(1,3,5));
	possible_param.push_back(ParamStruct(1,5,3));
	possible_param.push_back(ParamStruct(3,1,5));
	possible_param.push_back(ParamStruct(3,5,1));
	possible_param.push_back(ParamStruct(5,1,3));
	possible_param.push_back(ParamStruct(5,3,1));*/
	for(int i = 1; i <= 10; i++)
	{
		possible_param.push_back(ParamStruct(1,i,1));
	}

	// Locals only echoed to the performance file below; they shadow the
	// identically-named members set in the constructor.
	double alpha =1;
	double beta = 1;
	double gamma = 1;
	
	int keyword_idx = 0;
	int size_count = 0;

	// NOTE(review): this local shadows the member keyword_idf_rank filled
	// above — confirm which one downstream code is meant to use.
	vector<pair<int,int> > keyword_idf_rank;
	for(int i = 0; i < inverted_lists.size(); i++){
		// size_count tracks the largest posting list seen (currently unused
		// after this loop).
		if(inverted_lists[i].list_count > size_count){
			size_count = inverted_lists[i].list_count;
		}
		keyword_idf_rank.push_back(make_pair(inverted_lists[i].list_count,i));
	}
	sort(keyword_idf_rank.begin(),keyword_idf_rank.end());

	vector<int> keyword_idxes;
	//keyword_idxes.push_back(inverted_index.GetKeywordIdx("good"));
	//keyword_idxes.push_back(inverted_index.GetKeywordIdx("time"));
	/*keyword_idxes.push_back(inverted_index.GetKeywordIdx("play"));
	keyword_idxes.push_back(inverted_index.GetKeywordIdx("bball"));*/
	
	// Fixed seed so every run (and every time-slice size) samples the same
	// users and keywords — keeps results comparable across runs.
	unsigned int seed = 1398754707;
	srand(seed);
	
	ofstream perform_file("Performance.txt",ios::app);
	perform_file << "time slice: " << time_slice_size << endl;
	
	// Pre-sample all query users and keyword sets up front so each query
	// variant is measured against the identical workload.
	vector<int> user_degree;
	vector<vector<int> > query_keywords;
	for(int i = 0; i < number_of_experiments; i++){
		user_degree.push_back(social_graph->SampleUser(1));
		query_keywords.push_back(SampleKeyword(2));
	}

	// Query inputs to test (presumably the top-k value k — TODO confirm);
	// alternative sweeps are left commented out.
	vector<int> input_vec;
	//input_vec.push_back(1);
	//for(int i = 5; i <= 50; i+= 5) input_vec.push_back(i);
	input_vec.push_back(5);

	for(auto it = input_vec.begin(); it < input_vec.end(); it++){
		int input = *it;
	//for(int freq = 0; freq < 3; freq++){
	//for(int degree = 0; degree < 3; degree++){

	/*for(auto it = possible_param.begin(); it < possible_param.end(); it++){
	SetParameter(it->alpha,it->beta,it->gamma);*/
	

	//int input = 5;	
	for(int i = 0; i < number_of_experiments; i++){
		
		//keyword_idxes = inverted_index.SampleKeyword(freq);
		keyword_idxes = query_keywords[i]; 
		int user_idx = user_degree[i]; 
		//int user_idx = social_graph->SampleUser(degree);

		for(int k = 0; k < keyword_idxes.size(); k++){
			cout << "keyword: " << keyword_idxes[k] << " with size: " << inverted_lists[keyword_idxes[k]].list_count << endl;
		}
		cout << "-----------------------------------" << endl;
		/*int rand_idx = keyword_idf_rank.size()-1;
		keyword_idx = keyword_idf_rank[rand_idx].second;*/
		
	/*	social_graph->InitPointTranverse(user_idx);
		inverted_index.SingleKeywordQueryBaseLineInvertedIndex(keyword_idx,user_idx ,input,true);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->InitPointTranverse(user_idx);
		inverted_index.SingleKeywordQueryBaseLineInvertedIndex(keyword_idx,user_idx ,input,false);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->ResetTimer();
		social_graph->InitPointTranverse(user_idx);
		inverted_index.SingleKeywordQuery(keyword_idx,user_idx ,input,true);
		cout << "+++++++++++++++++++++" << endl;
		social_graph->ResetTimer();
		social_graph->InitPointTranverse(user_idx);
		inverted_index.SingleKeywordQuery(keyword_idx,user_idx ,input,false);
		cout << "+++++++++++++++++++++" << endl;*/
		//cout << " ********************************************* " << endl;
		//cout << " ********************************************* " << endl;

		// Each variant gets a fresh graph traversal state and a timer reset
		// afterwards so the shortest-path time is attributed per query.
		social_graph->InitPointTranverse(user_idx);
		MultiKeywordNaviveBaseLineInvertedIndex(keyword_idxes,user_idx,input,2);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->InitPointTranverse(user_idx);
		MultiKeywordNaviveBaseLineInvertedIndex(keyword_idxes,user_idx,input,1);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->InitPointTranverse(user_idx);
		MultiKeywordQuery(keyword_idxes,user_idx,input,2);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->InitPointTranverse(user_idx);
		MultiKeywordQuery(keyword_idxes,user_idx,input,1);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->InitPointTranverse(user_idx);
		MultiKeywordNormalBaseLineInvertedIndex(keyword_idxes,user_idx,input,2);
		social_graph->ResetTimer();
		cout << "+++++++++++++++++++++" << endl;
		social_graph->InitPointTranverse(user_idx);
		MultiKeywordNormalBaseLineInvertedIndex(keyword_idxes,user_idx,input,1);
		social_graph->ResetTimer();
		cout << "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&" << endl;
	}

	// Average the accumulated timings over the experiment count, dump one
	// line per query variant, then reset for the next input value.
	perform_file << alpha << " " << beta << " " << gamma << endl;
	ComputeStructArray();
	for(int i = 0; i < 6; i++){
		perform_file << performance_result[i].ToString() << endl;
	}
	perform_file << endl;
	ClearStructArray();

	}

	perform_file.close();
}

int main(int argc, char *argv[]){

	if(argc != 4){
		cout << "wrong arguments for " << argv[0] << endl;
		cout << "1. input directory" << endl;
		cout <<" 2. number of partitions" << endl;
		cout << "3. true partitions" << endl;
		exit(1);
	}

	string in_dir(argv[1]);
	int input_partitions = atoi(argv[2]);
	int true_partitions = atoi(argv[3]);

	Graph* social_graph = new Graph(input_partitions,true_partitions,in_dir);
	
	/*social_graph->InitPointTranverse(422259);
	cout << "dist 2 hop: " << social_graph->GetSocialRelevance(521364,0,true,true) << endl;
	cout << "dist: " << social_graph->GetSocialRelevance(521364,0,true,false) << endl;
	exit(1);*/

	/*srand(123456);
	vector<int> dist_count(50,0);
	for(int i = 0; i < 10; i++){
		int rand_user = rand() % social_graph->GetNumNodes();
		social_graph->InitPointTranverse(rand_user);

		cout << "node " << i << " with idx: "<< rand_user << endl;
		for(int j = 0; j < social_graph->GetNumNodes(); j++){
			if(j == rand_user) continue;
			int dist = static_cast<int>(social_graph->GetSocialRelevance(j,0) * social_graph->GetMaxSocialDistance());
			if(dist > 500 || dist < 0){
				cout << "problem with dist: " << dist << endl;
				cout << "target node: " << j << endl;
				exit(1);
			}
			dist_count[dist/10]++;
		}
	}
	for(int i = 0; i < dist_count.size(); i++){
		cout << i*10 << " " << dist_count[i] << endl;
	}
	exit(1);*/
	InvertedIndex inverted_index(in_dir,0,social_graph);

	vector<int> slice_vec;
	slice_vec.push_back(5);
	slice_vec.push_back(10);
	slice_vec.push_back(20);
	slice_vec.push_back(30);
	slice_vec.push_back(40);
	slice_vec.push_back(50);

	for(int i = 0; i < slice_vec.size(); i++) inverted_index.TestSuit(slice_vec[i]*TIME_INTERVAL_SIZE);

	return 0;
}