#include "StatisticCollector.h"
#include "Graph.h"
#include "Utility.h"

#include <time.h>

#include <algorithm>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>
using namespace std;

void get_average_tweet_per_person(string in_node_file, string in_stop_word_file, vector<string>& in_tweet_files){
	ifstream stop_word_file(in_stop_word_file.c_str());
	if(!stop_word_file.is_open()){
		cout << "cannot open stop word file" << endl;
		exit(1);
	}
	string stop_word;
	set<string> stop_word_set;
	int number_of_stop_words = 0;
	while(stop_word_file >> stop_word){
		if(stop_word_set.find(stop_word) == stop_word_set.end()){
			stop_word_set.insert(stop_word);
			number_of_stop_words++;
		}
	}
	stop_word_file.close();
	cout << "finish reading stop words" << endl;

	
	ifstream node_file(in_node_file.c_str());
	if(!node_file.is_open()){
		cout << "cannot open node file" << endl;
		exit(1);
	}

	unordered_set<string> user_name_set;
	string node_line;
	int num_users = 0;
	while(getline(node_file,node_line)){
		int node_ID;
		string node_name;
		stringstream ss(node_line);
		ss >> node_ID;
		ss >> node_name;

		user_name_set.insert(node_name);
		num_users++;
		if(num_users % 1000000 == 0){
			cout << "read " << (num_users / 1000000) << "M users" << endl;
		}
	}
	node_file.close();
	cout << "finish reading user file" << endl;

	int number_of_tokens = 0;
	int number_of_tweets = 0;

	unordered_map<string,int> keyword_map;

	cout << "start to read tweet file" << endl;
	for(int i = 0; i < in_tweet_files.size(); i++){
		ifstream tweet_file(in_tweet_files[i].c_str());
		if(!tweet_file.is_open()){
			cout << "cannot open tweet file: " << in_tweet_files[i] << endl;
			exit(1);
		}
		cout << "reading file: " << in_tweet_files[i] << endl;
		cout << "now number of tweets: " << number_of_tweets << endl;

		string empty_line;
		getline(tweet_file,empty_line);
		while(true){
			string publish_time;
			string author;
			string tweet;

			getline(tweet_file,publish_time);
			if(publish_time.empty()) break;
			publish_time.erase(0,2); // erase first two characters

			getline(tweet_file,author);
			if(author.empty()) break;
			author.erase(0,21);      // erase until the name of the users
		
			getline(tweet_file,tweet);
			if(tweet.empty()) break;
			tweet.erase(0,2);

			getline(tweet_file,empty_line); // read the empty line

			auto it = user_name_set.find(author);
			if(it == user_name_set.end()) continue;

			string tweet_origin(tweet);
			if(IsStringContainNonAsciiChar(tweet)) continue;
			tweet = ReplaceStrangeCharacterWithSpace(tweet);
			if(tweet.compare("no post title") == 0) continue;

			stringstream iss(tweet);
			stringstream oss;
			string token;
			while(iss >> token){
				if(IsTokenKeyword(token,stop_word_set)){
					oss << token << " ";
					number_of_tokens++;
					if(keyword_map.find(token) != keyword_map.end()){
						keyword_map[token] ++;
					}else{
						keyword_map[token] = 1;
					}
				}
			}
			tweet = oss.str();
			if(tweet.empty()) continue;

			number_of_tweets++;
			if(number_of_tweets % 1000000 == 0){
				cout << "read " << (number_of_tweets / 1000000) << "M tweets" << endl;
			}	
		}

		tweet_file.close();
	}

	int freq_keyword = 0;
	int freq_keyword_count = 0;
	for(auto it = keyword_map.begin(); it != keyword_map.end(); it++){
		if(it->second < KEYWORD_THRESHOLD) continue;
		freq_keyword++;
		freq_keyword_count += it->second;
	}
	
	cout << "number of user of the dataset: " << num_users << endl;
	cout << "number of distinct stop words: " << number_of_stop_words << endl;
	cout << "number of tweets: " << number_of_tweets << endl;
	cout << "total number of token: " << number_of_tokens << endl;
	cout << "number of distinct keyword: " << freq_keyword << endl;
	cout << "average tweets per person: " << (double)number_of_tweets / num_users << endl;
	cout << "average token per person: " << (double)number_of_tokens / num_users << endl;
	cout << "average keywords per person: " << (double)freq_keyword_count / num_users << endl;

}

// Reads a node file ("<id> <name>" per line) and an edge file of
// "<followed_ID> <following_ID>" pairs, builds the follow graph restricted to
// known users, and prints directed / mutual (undirected) edge counts plus the
// average mutual-follow degree.
void get_average_degree(std::string in_node_file, std::string in_edge_file){
	std::ifstream node_file(in_node_file.c_str());
	if(!node_file.is_open()){
		std::cout << "cannot open node file" << std::endl;
		exit(1);
	}

	// external node ID -> dense index in [0, num_users)
	std::unordered_map<int,int> user_id_set;
	std::string node_line;
	int num_users = 0;
	while(std::getline(node_file,node_line)){
		int node_ID;
		std::string node_name;
		std::stringstream ss(node_line);
		ss >> node_ID;
		ss >> node_name;

		user_id_set[node_ID] = num_users;
		num_users++;
		if(num_users % 1000000 == 0){
			std::cout << "read " << (num_users / 1000000) << "M users" << std::endl;
		}
	}
	node_file.close();
	std::cout << "finish reading user file" << std::endl;

	// adj_list[u] holds the dense indices that user u follows
	std::vector<std::vector<int> > adj_list(num_users);
	std::ifstream edge_file(in_edge_file.c_str());
	if(!edge_file.is_open()){
		std::cout << "cannot open edge file" << std::endl;
		exit(1);
	}

	int number_edges = 0;
	int followed_ID, following_ID;
	while(edge_file >> followed_ID >> following_ID){
		if(followed_ID == following_ID) continue; // drop self-loops
		// drop edges whose endpoints are not in the node file
		auto followed_it = user_id_set.find(followed_ID);
		auto following_it = user_id_set.find(following_ID);
		if(followed_it == user_id_set.end() || following_it == user_id_set.end()) continue;

		number_edges++;
		adj_list[following_it->second].push_back(followed_it->second);

		if(number_edges % 10000000 == 0) std::cout << "read " << (number_edges / 10000000) << "x10M edges" << std::endl;
	}
	edge_file.close();

	// sort each list so mutual edges can be found with binary_search
	for(size_t i = 0; i < adj_list.size(); i++) std::sort(adj_list[i].begin(),adj_list[i].end());
	std::cout << "finish sorting array" << std::endl;

	// An undirected edge {i, nbr} exists when both directed edges are present.
	// Only consider nbr >= i so each pair is counted once.
	int number_undirected = 0;
	for(int i = 0; i < (int)adj_list.size(); i++){
		for(size_t j = 0; j < adj_list[i].size(); j++){
			int nbr = adj_list[i][j];
			if(nbr < i) continue;
			// BUG FIX: search the neighbour's adjacency list (adj_list[nbr]),
			// not adj_list[j] -- j is a position within adj_list[i], not a
			// node index, so the original compared against the wrong list
			// (and could read past the end of adj_list).
			if(std::binary_search(adj_list[nbr].begin(),adj_list[nbr].end(),i)) number_undirected++;
		}
	}

	std::cout << "number of user: " << num_users << std::endl;
	std::cout << "number of undirected edges: " << number_undirected << std::endl;
	std::cout << "number of directed edges: " << number_edges << std::endl;
	std::cout << "average degree of users: " << (double)(number_undirected * 2) / num_users << std::endl;
}

void GraphDistanceTrend(string in_dir, int num_partitions)
{
	srand(time(NULL));
	Graph social_graph(num_partitions,num_partitions,in_dir);

	int num_nodes = social_graph.GetNumNodes();

	int max_social_dist = social_graph.GetMaxSocialDistance();
	vector<int> count(max_social_dist/10,0);

	for(int i = 0; i < 100; i++)
	{
		int source_node = rand() % num_nodes;
		social_graph.InitPointTranverse(source_node);

		for (int x = 0; x < num_nodes; x++)
		{
			if(x != source_node)
			{
				double relevance = social_graph.GetSocialRelevance(x,0,2);
				int dist = static_cast<int>((double)(1.0-relevance)*max_social_dist);
				count[dist/10]++;
			}
		}
	}

	for (int i = 0; i < count.size(); i++)
	{
		count[i] = count[i]/100;
		if(count[i] != 0)
		{
			cout << (i*10) << " : " << count[i] << endl;
		}
	}

}

// Driver: pick exactly one analysis to run. The alternatives below are kept
// for convenience but disabled.
int main(){
	// Per-user tweet/token/keyword statistics over the 2009 crawl (disabled):
	/*vector<string> in_tweets_vec;
	in_tweets_vec.push_back("./data/tweets/tweets2009-06.txt");
	in_tweets_vec.push_back("./data/tweets/tweets2009-07.txt");
	in_tweets_vec.push_back("./data/tweets/tweets2009-08.txt");
	get_average_tweet_per_person("./data/numeric2screen","./data/stop_words.txt",in_tweets_vec);*/

	// Social-distance histogram over the sampled news graph, 20 partitions.
	GraphDistanceTrend("./sampledNews",20);

	// Follow-graph degree statistics (disabled):
	//get_average_degree("./data/numeric2screen","./data/twitter_rv.net");

	return 0;
}