﻿#include "DataFrame.h"

#include <algorithm>

namespace text_match
{
const float eps = 1e-5;

void removeSortRepeat(vector<int> &u){
    int j=1;
    for(int i=1; i<u.size(); i++){
        if(u[i] != u[i-1]){
            u[j++] = u[i];
        }
    }
    while(u.size() > j) u.pop_back();
}

float getWeight(const std::vector<float> &weight, const int key, const std::vector<bool> &stop_words){
    if(key >= 0 && key < stop_words.size() && stop_words[key]) return 0.0f;
    if(weight.size() == 0) return 1.0f;
    if(key < 0 || key >= weight.size()) return 0.0f;
    return weight[key];
}

float wordMatch(std::vector<int> u, std::vector<int> v, const std::vector<bool> &stop_words, const std::vector<float> &weight){
    sort(u.begin(), u.end());
    sort(v.begin(), v.end());
    removeSortRepeat(u);
    removeSortRepeat(v);
    float share_weight = 0;
    float total_weight = 0;
    for(int i=0; i<u.size(); i++){
        total_weight += getWeight(weight, u[i], stop_words);
        for(int j=0; j<v.size(); j++){
            if(i==0) total_weight += getWeight(weight, v[j], stop_words);
            if(u[i] == v[j]) share_weight += getWeight(weight, u[i], stop_words);
        }
    }
    return 2.0f * share_weight / (total_weight + eps);
}


float moment(const VectorXf &u, int n){
    if(n==0) return 1.0f;
    if(n==1) return 0.0f;
    return (u.array() - u.mean()).pow(n).mean();
}

float skew(VectorXf u){
    float m2 = moment(u, 2);
    float m3 = moment(u, 3);
    if(abs(m2) < eps) return 0.0f;
    return m3 / pow(m2, 1.5);
}

float kurtosis(VectorXf u){
    float m2 = moment(u, 2);
    float m4 = moment(u, 4);
    if(abs(m2)<eps) return -3.0f;
    return m4 / pow(m2, 2) - 3.0f;
}

float russellrao(VectorXf u, VectorXf v){
    if(u.size() != v.size() || u.size() == 0 || v.size() == 0){
        printf("ERROR: russellrao distance input should have same nonzero size\n");
        return 0;
    }

    ArrayXf u_arr = u.array();
    ArrayXf v_arr = v.array();
    float tt = (u_arr * v_arr).sum();
    int n = u.size();
    return (n - tt) / (n + eps);
}

void count_not_bool(VectorXf u, VectorXf v, float &tt, float &tf, float &ff, float &ft){
    ArrayXf u_arr = u.array();
    ArrayXf v_arr = v.array();
    ArrayXf not_u = 1.0f - u_arr;
    ArrayXf not_v = 1.0f - v_arr;

    ff = (not_u * not_v).sum();
    tf = (u_arr * not_v).sum();
    tt = (u_arr * v_arr).sum();
    ft = (not_u * v_arr).sum();
}

float braycurtis(VectorXf u, VectorXf v){
    if(u.size() != v.size() || u.size() == 0 || v.size() == 0){
        printf("ERROR: braycurtis distance input should have same nonzero size\n");
        return 0;
    }

    ArrayXf u_arr = u.array();
    ArrayXf v_arr = v.array();

    return (u_arr - v_arr).abs().sum() / ((u_arr + v_arr).abs().sum() + eps);
}

float sokalmichener(VectorXf u, VectorXf v){
    if(u.size() != v.size() || u.size() == 0 || v.size() == 0){
        printf("ERROR: sokalmichener distance input should have same nonzero size\n");
        return 0;
    }

    float tt, ff, tf, ft;
    count_not_bool(u, v, tt, tf, ff, ft);
    return 2.0f * (tf + ft) / (tt + ff + 2.0f * (tf + ft) + eps);
}

float canberra(VectorXf u, VectorXf v){
    if(u.size() != v.size() || u.size() == 0 || v.size() == 0){
        printf("ERROR: canberra distance input should have same nonzero size\n");
        return 0;
    }

    ArrayXf u_arr = u.array();
    ArrayXf v_arr = v.array();

    return (u_arr - v_arr).abs().sum() / ((u_arr.abs() + v_arr.abs()).sum() + eps);
}

float chebyshev(VectorXf u, VectorXf v){
    if(u.size() != v.size() || u.size() == 0 || v.size() == 0){
        printf("ERROR: chebyshev distance input should have same nonzero size\n");
        return 0;
    }

    return (u.array() - v.array()).abs().maxCoeff();
}


bool is_chinese(const string& str)
{
  unsigned char utf[4] = {0};
  unsigned char unicode[3] = {0};
  bool res = false;
  for (int i = 0; i < str.length(); i++) {
    if ((str[i] & 0x80) == 0) {   //ascii begin with 0
      res = false;
    }
    else /*if ((str[i] & 0x80) == 1) */{
      utf[0] = str[i];
      utf[1] = str[i + 1];
      utf[2] = str[i + 2];
      i++;
      i++;
      unicode[0] = ((utf[0] & 0x0F) << 4) | ((utf[1] & 0x3C) >>2);
      unicode[1] = ((utf[1] & 0x03) << 6) | (utf[2] & 0x3F);
//      printf("%x,%x\n",unicode[0],unicode[1]);
//      printf("aaaa %x,%x,%x\n\n",utf[0],utf[1],utf[2]);
      if(unicode[0] >= 0x4e && unicode[0] <= 0x9f){
         if (unicode[0] == 0x9f && unicode[1] >0xa5)
                res = false;
         else         
               res = true;
      }else
         res = false;
    }
  }
  return res;
}

void split_string(const string& s, const string& delim,vector<string>& ret)
{
	size_t last = 0;
	size_t index=s.find_first_of(delim,last);
	while (index!=std::string::npos)
	{
		ret.push_back(s.substr(last,index-last));
		last=index+1;
		index=s.find_first_of(delim,last);
	}
	if (index-last>0)
	{
		ret.push_back(s.substr(last,index-last));
	}
}


void Char2Token(const char* szSent, vector<string>& tokens){
	int i = 0;
	while (szSent[i] != '\0')
	{
		if (szSent[i]<0){//汉字
			char szTmp[4];// = {0};
			szTmp[0] = szSent[i];
			szTmp[1] = szSent[i+1];
			szTmp[2] = szSent[i+2];
			szTmp[3] = 0;
			string strTmp(szTmp);
			if(is_chinese(strTmp)){
			    tokens.push_back(strTmp);
			}
			i+=3;
		}else{
			char szTmp[3] = {0};
			szTmp[0] = szSent[i];
			if(judgeletter_bit(szTmp[0])){
			    string strTmp(szTmp);
			    tokens.push_back(strTmp);
			}
			i++;
		}
	}
	return;
}

DataFrame::DataFrame(){
	
	const char* const DICT_PATH = "cppjieba/dict/jieba.dict.utf8";
    const char* const HMM_PATH = "cppjieba/dict/hmm_model.utf8";
    const char* const USER_DICT_PATH = "cppjieba/dict/user.dict.utf8";
    const char* const IDF_PATH = "cppjieba/dict/idf.utf8";
    const char* const STOP_WORD_PATH = "cppjieba/dict/stop_words.utf8";

	j=new cppjieba::Jieba(DICT_PATH,
        HMM_PATH,
        USER_DICT_PATH,
        IDF_PATH,
        STOP_WORD_PATH);
}

void DataFrame::init_for_predict(){
	TrainTemp train_temp = this->read_csv("data.tsv");
	this->compute_tfidf(train_temp);
	this->load_wordvec("word2vec.vec");
	this->load_wordvec_norm("word2vec_norm.vec");
}

void DataFrame::compute_edit_distance_for_predict(PredictTemp predict_temp,FeatureResult &feature_result){
	vector<vector<string> > text1fenci_column = predict_temp.text1fenci_column_predict;
	vector<vector<string> > text1fenzi_column = predict_temp.text1fenzi_column_predict;
	vector<vector<string> > text2fenci_column = predict_temp.text2fenci_column_predict;
	vector<vector<string> > text2fenzi_column = predict_temp.text2fenzi_column_predict;
	for(int i =0;i<text1fenci_column.size();i++){
		vector<int> left;
		for(int j=0;j<text1fenci_column[i].size();j++){
			left.push_back(word2id[text1fenci_column[i][j]]);
		}
		vector<int> right;
		for(int j=0;j<text2fenci_column[i].size();j++){
			right.push_back(word2id[text2fenci_column[i][j]]);
		}
		feature_result.word_levenstein.push_back(EditDistance::levenstein(left,right));
		feature_result.word_jaroWinkler.push_back(EditDistance::jaroWinkler(left,right));
		feature_result.word_tokenSetRatio.push_back(EditDistance::tokenSetRatio(left,right));
		feature_result.word_partialRatio.push_back(EditDistance::partialRatio(left,right));
		feature_result.word_partialSortRatio.push_back(EditDistance::partialSortRatio(left,right));
		feature_result.word_sortRatio.push_back(EditDistance::sortRatio(left,right));
	}
}

void DataFrame::compute_edit_distance(TrainTemp train_temp){
	cout<<"start compute edit distance"<<endl;
	vector<vector<string> > text1fenci_column = train_temp.text1fenci_column;
	vector<vector<string> > text1fenzi_column = train_temp.text1fenzi_column;
	vector<vector<string> > text2fenci_column = train_temp.text2fenci_column;
	vector<vector<string> > text2fenzi_column = train_temp.text2fenzi_column;
	for(int i =0;i<text1fenci_column.size();i++){
		vector<int> left;
		for(int j=0;j<text1fenci_column[i].size();j++){
			left.push_back(word2id[text1fenci_column[i][j]]);
		}
		vector<int> right;
		for(int j=0;j<text2fenci_column[i].size();j++){
			right.push_back(word2id[text2fenci_column[i][j]]);
		}
		feature_result.word_levenstein.push_back(EditDistance::levenstein(left,right));
		feature_result.word_jaroWinkler.push_back(EditDistance::jaroWinkler(left,right));
		feature_result.word_tokenSetRatio.push_back(EditDistance::tokenSetRatio(left,right));
		feature_result.word_partialRatio.push_back(EditDistance::partialRatio(left,right));
		feature_result.word_partialSortRatio.push_back(EditDistance::partialSortRatio(left,right));
		feature_result.word_sortRatio.push_back(EditDistance::sortRatio(left,right));
	}
	cout<<"end compute edit distance"<<endl;
}

FeatureResult DataFrame::predict(string input1,string input2){
    FeatureResult feature_result_for_predict;
	PredictTemp predict_temp = this->fenci_for_predict(input1,input2);
	this->compute_tfidf_for_predict(predict_temp,feature_result_for_predict);
	this->compute_wordvec_for_predict(predict_temp,feature_result_for_predict);
	this->compute_len_feature_for_predict(predict_temp,feature_result_for_predict);
	this->compute_wordmatch_for_predict(predict_temp,feature_result_for_predict);
	this->compute_edit_distance_for_predict(predict_temp,feature_result_for_predict);
	return feature_result_for_predict;
}

int DataFrame::get_rownum(){
	return text1_column.size();
}

cppjieba::Jieba* DataFrame::get_jieba(){
	return j;
}

void DataFrame::compute_wordvec(TrainTemp train_temp){
	vector<vector<string> > text1fenci_column = train_temp.text1fenci_column;
	vector<vector<string> > text1fenzi_column = train_temp.text1fenzi_column;
	vector<vector<string> > text2fenci_column = train_temp.text2fenci_column;
	vector<vector<string> > text2fenzi_column = train_temp.text2fenzi_column;
	for(int i=0;i<text1fenci_column.size();i++){
		VectorXf one_vector1 = VectorXf::Zero(100);
		VectorXf one_vector1_norm = VectorXf::Zero(100);
		for(int j=1;j<text1fenci_column[i].size();j++){
			string word = text1fenci_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector1 = one_vector1 + word2vec[word];
					one_vector1_norm = one_vector1_norm + word2vec_norm[word];
				}
		    //}
		}
		
		VectorXf one_vector2 = VectorXf::Zero(100);
		VectorXf one_vector2_norm = VectorXf::Zero(100);
		for(int j=1;j<text2fenci_column[i].size();j++){
			string word = text2fenci_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector2 = one_vector2 + word2vec[word];
					one_vector2_norm = one_vector2_norm + word2vec_norm[word];
				}
		    //}
		}
		
		VectorXf one_vector3 = VectorXf::Zero(100);
		VectorXf one_vector3_norm = VectorXf::Zero(100);
		for(int j=1;j<text1fenzi_column[i].size();j++){
			string word = text1fenzi_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector3 = one_vector3 + word2vec[word];
					one_vector3_norm = one_vector3_norm + word2vec_norm[word];
				}
		    //}
		}
		
		VectorXf one_vector4 = VectorXf::Zero(100);
		VectorXf one_vector4_norm = VectorXf::Zero(100);
		for(int j=1;j<text2fenzi_column[i].size();j++){
			string word = text2fenzi_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector4 = one_vector4 + word2vec[word];
					one_vector4_norm = one_vector4_norm + word2vec_norm[word];
				}
		    //}
		}
		feature_result.word_vec_mean_kulsinski.push_back(ScipyDistance::kulsinski(one_vector1,one_vector2));
		feature_result.char_vec_mean_kulsinski.push_back(ScipyDistance::kulsinski(one_vector3,one_vector4));
		feature_result.word_vec_mean_russellrao.push_back(ScipyDistance::russellrao(one_vector1,one_vector2));
		feature_result.char_vec_mean_russellrao.push_back(ScipyDistance::russellrao(one_vector3,one_vector4));
		
		feature_result.word_vec_mean_kulsinski_norm.push_back(ScipyDistance::kulsinski(one_vector1_norm,one_vector2_norm));
		feature_result.char_vec_mean_kulsinski_norm.push_back(ScipyDistance::kulsinski(one_vector3_norm,one_vector4_norm));
		feature_result.word_vec_mean_russellrao_norm.push_back(ScipyDistance::russellrao(one_vector1_norm,one_vector2_norm));
		feature_result.char_vec_mean_russellrao_norm.push_back(ScipyDistance::russellrao(one_vector3_norm,one_vector4_norm));
		
		feature_result.word_q2_kur.push_back(ScipyDistance::kurtosis(one_vector2));
		feature_result.word_q1_kur.push_back(ScipyDistance::kurtosis(one_vector1));
		feature_result.char_q2_kur.push_back(ScipyDistance::kurtosis(one_vector4));
		feature_result.char_q1_kur.push_back(ScipyDistance::kurtosis(one_vector3));
		feature_result.word_q2_skew.push_back(ScipyDistance::skew(one_vector2));
		feature_result.word_q1_skew.push_back(ScipyDistance::skew(one_vector1));
		feature_result.char_q2_skew.push_back(ScipyDistance::skew(one_vector4));
		feature_result.char_q1_skew.push_back(ScipyDistance::skew(one_vector3));
	}
	cout<<"compute wordvec feature end"<<endl;
}

void DataFrame::load_wordvec(const char * filename)
{
	ifstream infile;
	infile.open(filename); 
	if(!infile.is_open())
	{
		cout << "打开文件出错" << endl;
		return;
	}
	string line;
	vector<string> split_result_firstline;
	getline(infile,line);
	split_string(line," ",split_result_firstline);
	cout<<"word dict size "<<split_result_firstline[0]<<endl;
	//cout<<split_result_firstline[1]<<endl;
    while(getline(infile,line))
    { 
        vector<string> split_result;
        split_string(line," ",split_result);
		vector<float> split_result_float;
		for(int i=1;i<split_result.size();i++){
			split_result_float.push_back(atof(split_result[i].c_str()));
		}
		float* ptr = &split_result_float[0];
		Map<VectorXf> one_vector(ptr, 100);
		word2vec[split_result[0]]=one_vector;
    }
    infile.close();
	cout<<"read word size "<<word2vec.size()<<endl;
    cout<<"read wordvec end"<<endl;
}	

void DataFrame::load_wordvec_norm(const char * filename)
{
	ifstream infile;
	infile.open(filename); 
	if(!infile.is_open())
	{
		cout << "打开文件出错" << endl;
		return;
	}
	string line;
	vector<string> split_result_firstline;
	getline(infile,line);
	split_string(line," ",split_result_firstline);
	cout<<"word dict size "<<split_result_firstline[0]<<endl;
	//cout<<split_result_firstline[1]<<endl;
    while(getline(infile,line))
    { 
        vector<string> split_result;
        split_string(line," ",split_result);
		vector<float> split_result_float;
		for(int i=1;i<split_result.size();i++){
			split_result_float.push_back(atof(split_result[i].c_str()));
		}
		float* ptr = &split_result_float[0];
		Map<VectorXf> one_vector(ptr, 100);
		word2vec_norm[split_result[0]]=one_vector;
    }
    infile.close();
	cout<<"read word norm size "<<word2vec_norm.size()<<endl;
    cout<<"read wordvec norm end"<<endl;
}	


void DataFrame::compute_wordvec_for_predict(PredictTemp predict_temp,FeatureResult &feature_result_for_predict)
{
	
    vector<vector<string> > text1fenci_column = predict_temp.text1fenci_column_predict;
	vector<vector<string> > text1fenzi_column = predict_temp.text1fenzi_column_predict;
	vector<vector<string> > text2fenci_column = predict_temp.text2fenci_column_predict;
	vector<vector<string> > text2fenzi_column = predict_temp.text2fenzi_column_predict;

	for(int i=0;i<text1fenci_column.size();i++){
		VectorXf one_vector1 = VectorXf::Zero(100);
		VectorXf one_vector1_norm = VectorXf::Zero(100);
		for(int j=1;j<text1fenci_column[i].size();j++){
			string word = text1fenci_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector1 = one_vector1 + word2vec[word];
					one_vector1_norm = one_vector1_norm + word2vec_norm[word];
				}
		    //}
		}
		
		VectorXf one_vector2 = VectorXf::Zero(100);
		VectorXf one_vector2_norm = VectorXf::Zero(100);
		for(int j=1;j<text2fenci_column[i].size();j++){
			string word = text2fenci_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector2 = one_vector2 + word2vec[word];
					one_vector2_norm = one_vector2_norm + word2vec_norm[word];
				}
		    //}
		}
		
		VectorXf one_vector3 = VectorXf::Zero(100);
		VectorXf one_vector3_norm = VectorXf::Zero(100);
		for(int j=1;j<text1fenzi_column[i].size();j++){
			string word = text1fenzi_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector3 = one_vector3 + word2vec[word];
					one_vector3_norm = one_vector3_norm + word2vec_norm[word];
				}
		    //}
		}
		
		VectorXf one_vector4 = VectorXf::Zero(100);
		VectorXf one_vector4_norm = VectorXf::Zero(100);
		for(int j=1;j<text2fenzi_column[i].size();j++){
			string word = text2fenzi_column[i][j];
			//if (is_chinese(word)){
				map<string,VectorXf>::iterator search = word2vec.find(word);
                if(search != word2vec.end()){
			        one_vector4 = one_vector4 + word2vec[word];
					one_vector4_norm = one_vector4_norm + word2vec_norm[word];
				}
		    //}
		}
		
		feature_result_for_predict.word_vec_mean_kulsinski.push_back(ScipyDistance::kulsinski(one_vector1,one_vector2));
		feature_result_for_predict.char_vec_mean_kulsinski.push_back(ScipyDistance::kulsinski(one_vector3,one_vector4));
		feature_result_for_predict.word_vec_mean_russellrao.push_back(ScipyDistance::russellrao(one_vector1,one_vector2));
		feature_result_for_predict.char_vec_mean_russellrao.push_back(ScipyDistance::russellrao(one_vector3,one_vector4));
		
		feature_result_for_predict.word_vec_mean_kulsinski_norm.push_back(ScipyDistance::kulsinski(one_vector1_norm,one_vector2_norm));
		feature_result_for_predict.char_vec_mean_kulsinski_norm.push_back(ScipyDistance::kulsinski(one_vector3_norm,one_vector4_norm));
		feature_result_for_predict.word_vec_mean_russellrao_norm.push_back(ScipyDistance::russellrao(one_vector1_norm,one_vector2_norm));
		feature_result_for_predict.char_vec_mean_russellrao_norm.push_back(ScipyDistance::russellrao(one_vector3_norm,one_vector4_norm));
		
		feature_result_for_predict.word_q2_kur.push_back(kurtosis(one_vector2));
		feature_result_for_predict.word_q1_kur.push_back(kurtosis(one_vector1));
		feature_result_for_predict.char_q2_kur.push_back(kurtosis(one_vector4));
		feature_result_for_predict.char_q1_kur.push_back(kurtosis(one_vector3));
		feature_result_for_predict.word_q2_skew.push_back(skew(one_vector2));
		feature_result_for_predict.word_q1_skew.push_back(skew(one_vector1));
		feature_result_for_predict.char_q2_skew.push_back(skew(one_vector4));
		feature_result_for_predict.char_q1_skew.push_back(skew(one_vector3));
	}
	cout<<"compute wordvec feature end"<<endl;
}	

void DataFrame::compute_len_feature_for_predict(PredictTemp predict_temp,FeatureResult &feature_result_for_predict)
{
	vector<vector<string> > text1fenci_column_predict = predict_temp.text1fenci_column_predict;
	vector<vector<string> > text2fenci_column_predict = predict_temp.text2fenci_column_predict;
    for(int i=0;i<text1fenci_column_predict.size();i++){
		feature_result_for_predict.len_word_text1.push_back(text1fenci_column_predict[i].size());
		feature_result_for_predict.len_word_text2.push_back(text2fenci_column_predict[i].size());
	}
}

void DataFrame::compute_len_feature(TrainTemp train_temp)
{
	vector<vector<string> > text1fenci_column = train_temp.text1fenci_column;
	vector<vector<string> > text2fenci_column = train_temp.text2fenci_column;
    for(int i=0;i<text1fenci_column.size();i++){
		feature_result.len_word_text1.push_back(text1fenci_column[i].size());
		feature_result.len_word_text2.push_back(text2fenci_column[i].size());
	}
}


void DataFrame::compute_wordmatch_for_predict(PredictTemp predict_temp,FeatureResult &feature_result)
{

  vector<vector<string> > text1fenci_column = predict_temp.text1fenci_column_predict;
  vector<vector<string> > text2fenci_column = predict_temp.text2fenci_column_predict;
  vector<vector<string> > text1fenzi_column = predict_temp.text1fenzi_column_predict;
  vector<vector<string> > text2fenzi_column = predict_temp.text2fenzi_column_predict;
  
  if(stopword_vector.size()<=0){
	  cout<<"ERROR stopword not load"<<endl;
  }
  if(word2id.size()<=0){
	  cout<<"ERROR word2id not load"<<endl;
  }
  
  vector<bool> stopword_vector_temp;
  map<string,int>::iterator iter;
  for(iter=word2id.begin();iter!=word2id.end();iter++)    
  {
      stopword_vector_temp.push_back(false);
  }  
	 
  for(int i=0;i<text1fenci_column.size();i++){
	 vector<int> temp1;
	 vector<int> temp2;
	 for(int j=0;j<text1fenci_column[i].size();j++){
		 temp1.push_back(word2id[text1fenci_column[i][j]]);
	 }
	 for(int j=0;j<text2fenci_column[i].size();j++){
		 temp2.push_back(word2id[text2fenci_column[i][j]]);
	 }
	 vector<int> temp3;
	 vector<int> temp4;
	 for(int j=0;j<text1fenzi_column[i].size();j++){
		 temp3.push_back(word2id[text1fenzi_column[i][j]]);
	 }
	 for(int j=0;j<text2fenzi_column[i].size();j++){
		 temp4.push_back(word2id[text2fenzi_column[i][j]]);
	 }

	 feature_result.word_match_list_stopword.push_back(EditDistance::wordMatch(temp1,temp2,this->stopword_vector,this->weight));
	 feature_result.char_match_list_stopword.push_back(EditDistance::wordMatch(temp3,temp4,this->stopword_vector,this->weight));
	 
	 feature_result.word_match_list.push_back(EditDistance::wordMatch(temp1,temp2,stopword_vector_temp,this->weight));
	 feature_result.char_match_list.push_back(EditDistance::wordMatch(temp3,temp4,stopword_vector_temp,this->weight));
	 
  }	
  cout<<"compute word match for predict end"<<endl;
}

void DataFrame::compute_wordmatch(TrainTemp train_temp)
{

  vector<vector<string> > text1fenci_column = train_temp.text1fenci_column;
  vector<vector<string> > text2fenci_column = train_temp.text2fenci_column;
  vector<vector<string> > text1fenzi_column = train_temp.text1fenzi_column;
  vector<vector<string> > text2fenzi_column = train_temp.text2fenzi_column;
  
  if(stopword_vector.size()<=0){
	  cout<<"ERROR stopword not load"<<endl;
  }
  if(word2id.size()<=0){
	  cout<<"ERROR word2id not load"<<endl;
  }
  
  vector<bool> stopword_vector_temp;
  map<string,int>::iterator iter;
  for(iter=word2id.begin();iter!=word2id.end();iter++)    
  {
      stopword_vector_temp.push_back(false);
  }
  for(int i=0;i<text1fenci_column.size();i++){
	 vector<int> temp1;
	 vector<int> temp2;
	 for(int j=0;j<text1fenci_column[i].size();j++){
		 temp1.push_back(word2id[text1fenci_column[i][j]]);
	 }
	 for(int j=0;j<text2fenci_column[i].size();j++){
		 temp2.push_back(word2id[text2fenci_column[i][j]]);
	 }
	 vector<int> temp3;
	 vector<int> temp4;
	 for(int j=0;j<text1fenzi_column[i].size();j++){
		 temp3.push_back(word2id[text1fenzi_column[i][j]]);
	 }
	 for(int j=0;j<text2fenzi_column[i].size();j++){
		 temp4.push_back(word2id[text2fenzi_column[i][j]]);
	 }
	 feature_result.word_match_list_stopword.push_back(EditDistance::wordMatch(temp1,temp2,this->stopword_vector,this->weight));
	 feature_result.char_match_list_stopword.push_back(EditDistance::wordMatch(temp3,temp4,this->stopword_vector,this->weight));
	 
	 feature_result.word_match_list.push_back(EditDistance::wordMatch(temp1,temp2,stopword_vector_temp,this->weight));
	 feature_result.char_match_list.push_back(EditDistance::wordMatch(temp3,temp4,stopword_vector_temp,this->weight));
  }	
  cout<<"compute word match end"<<endl;
}

void DataFrame::compute_tfidf_for_predict(PredictTemp predict_temp,FeatureResult &feature_result_for_predict){
	
	vector<vector<string> > text1fenci_column_predict = predict_temp.text1fenci_column_predict;
	vector<vector<string> > text1fenzi_column_predict = predict_temp.text1fenzi_column_predict;
	vector<vector<string> > text2fenci_column_predict = predict_temp.text2fenci_column_predict;
	vector<vector<string> > text2fenzi_column_predict = predict_temp.text2fenzi_column_predict;
	
	for(int i=0; i<text1fenci_column_predict.size(); i++)
	{
		vector<string> chars1=text1fenci_column_predict[i];
		vector<string> words1=text1fenci_column_predict[i];
		vector<string> chars2=text2fenci_column_predict[i];
		vector<string> words2=text2fenci_column_predict[i];
	
		map<string, float> tf_text1;
		map<string, float>::iterator iter;
		for(int k=0;k<words1.size();k++){
				iter = tf_text1.find(words1[k]);  
				if(iter != tf_text1.end()){
				   tf_text1[words1[k]]+=1.0/words1.size();
				}else{  
				   tf_text1[words1[k]]=1.0/words1.size();
				}
		}

		VectorXf one_vector = VectorXf::Zero(word2id.size()); // TODO OOV !!!
        for(int i=0;i<words1.size();i++){		
		      string word = words1[i];
			  float tfidf = idf_word[word]*tf_text1[word];
			 
			  int id = word2id[word];
			  if(one_vector(id)>-1e+5 && one_vector(id)<1e-5){
			    one_vector(id)=tfidf;
			  }else{
				one_vector(id)=one_vector(id)+tfidf;  
			  }
	    }
		map<string, float> tf_text2;
		
		for(int k=0;k<words2.size();k++){
				iter = tf_text2.find(words2[k]);  
				if(iter != tf_text2.end()){
				   tf_text2[words2[k]]+=1.0/words2.size();
				}else{  
				   tf_text2[words2[k]]=1.0/words2.size();
				}
		}

		VectorXf one_vector2 = VectorXf::Zero(word2id.size());
	    for(int i=0;i<words2.size();i++){	
			  string word = words2[i];
			  float tfidf = idf_word[word]*tf_text2[word];
			 
			  int id = word2id[word];
			  if(one_vector2(id)>-1e+5 && one_vector2(id)<1e-5){
			    one_vector2(id)=tfidf;
			  }else{
				one_vector2(id)=one_vector2(id)+tfidf;  
			  }
		}
		feature_result_for_predict.tfidf_word_canberra.push_back(ScipyDistance::canberra(one_vector,one_vector2));
		feature_result_for_predict.tfidf_word_chebyshev.push_back(ScipyDistance::chebyshev(one_vector,one_vector2));
		feature_result_for_predict.tfidf_word_braycurtis.push_back(ScipyDistance::braycurtis(one_vector,one_vector2));
		feature_result_for_predict.tfidf_word_sokalmichener.push_back(ScipyDistance::sokalmichener(one_vector,one_vector2));
		feature_result_for_predict.tfidf_word_yule.push_back(ScipyDistance::yule(one_vector,one_vector2));
	}
	cout<<"compute predict tfidf end"<<endl;
	this->read_stopword("stop_words.utf8");
	this->build_weight();
}

void DataFrame::compute_tfidf(TrainTemp train_temp)
{
	vector<vector<string> > text1fenzi_column = train_temp.text1fenzi_column;
	vector<vector<string> > text2fenzi_column = train_temp.text2fenzi_column;
	vector<vector<string> > text1fenci_column = train_temp.text1fenci_column;
	vector<vector<string> > text2fenci_column = train_temp.text2fenci_column;
	
	vector<map<string, float> > column1_tf_word;//should not be here
	vector<map<string, float> > column2_tf_word;//should not be here
	char unk[]={"UNK"};
    string s=unk;
	map<string, int>::iterator iter1; 
	map<string, float>::iterator iter2; 
    word2id[s]=1;
	idf_word[s]=1;
	word2count[s]=1;
	id2word.push_back(s);
    int word_count = 1;		
	for(int i=0; i<text1_column.size(); i++)
	{
		vector<string> chars1=text1fenzi_column[i];
		vector<string> words1=text1fenci_column[i];
	
		map<string, float> word2count_text1;
		
		//compute word2count for text1
		//compute tf value for text1, word level
		for(int k=0;k<words1.size();k++){
			//if(is_chinese(words1[k])){
				iter1 = word2id.find(words1[k]);  
				if(iter1 != word2id.end()){
					word2count[words1[k]]=word2count[words1[k]]+1;
				}else{  
					word2count[words1[k]]=1;
					word2id[words1[k]]=word_count++;
					id2word.push_back(words1[k]);
				}
				
				iter2 = word2count_text1.find(words1[k]);  
				if(iter2 != word2count_text1.end()){
				   word2count_text1[words1[k]]+=1.0/words1.size();
				}else{  
				   word2count_text1[words1[k]]=1.0/words1.size();
				}
			//}
		}
		
		//compute char2count for text1
		for(int k=0;k<chars1.size();k++){
			//if(is_chinese(chars1[k])){
				iter1 = word2id.find(chars1[k]);  
				if(iter1 != word2id.end()){
					char2count[chars1[k]]=char2count[chars1[k]]+1;
				}else{  
					char2count[chars1[k]]=1;
					word2id[chars1[k]]=word_count++;
					id2word.push_back(chars1[k]);
				}
			//}
		}
		
		//compute idf value for text1, word level
		map<string,int> temp_map1;
		for(int k=0;k<words1.size();k++){
			//if(is_chinese(words1[k])){
				iter1 = temp_map1.find(words1[k]);
				if(iter1 != temp_map1.end()){
				   continue;
				}else{  
				   temp_map1[words1[k]]=1;
				}

			    iter2 = idf_word.find(words1[k]);
				if(iter2 != idf_word.end()){
				   idf_word[words1[k]]+=1;
				}else{  
				   idf_word[words1[k]]=1;
				}
			//}
		}	
		
		column1_tf_word.push_back(word2count_text1);		
		
		vector<string> chars2=text2fenzi_column[i];
		vector<string> words2=text2fenci_column[i];
		
		map<string, float> word2count_text2;
		
		//compute word2count for text2
		//compute tf value for text2, word level
		for(int k=0;k<words2.size();k++){
			//if(is_chinese(words2[k])){
				iter1 = word2id.find(words2[k]);			
				if(iter1 != word2id.end()){
					word2count[words2[k]]=word2count[words2[k]]+1;
				}else{
					word2count[words2[k]]=1;				
					word2id[words2[k]]=word_count++;
					id2word.push_back(words2[k]);
				}
				
				iter2 = word2count_text2.find(words2[k]);  
				if(iter2 != word2count_text2.end()){
				   word2count_text2[words2[k]]+=1.0/words2.size();
				}else{  
				   word2count_text2[words2[k]]=1.0/words2.size();
				}
			//}
		}
		
		//compute char2count for text2
		for(int k=0;k<chars2.size();k++){ 
			//if(is_chinese(chars2[k])){
				iter1 = word2id.find(chars2[k]);  
				if(iter1 != word2id.end()){
					char2count[chars2[k]]=char2count[chars2[k]]+1;
				}else{  
					char2count[chars2[k]]=1;
					word2id[chars2[k]]=word_count++;
					id2word.push_back(chars2[k]);
				}
			//}
		}
		
		map<string,int> temp_map2;
		//compute idf value for text2, word level
		for(int k=0;k<words2.size();k++){
			//if(is_chinese(words2[k])){
				
				iter1 = temp_map2.find(words2[k]);
				if(iter1 != temp_map2.end()){
				   continue;
				}else{  
				   temp_map2[words2[k]]=1;
				}
				
			    iter2 = idf_word.find(words2[k]);
				if(iter2 != idf_word.end()){
				   idf_word[words2[k]]+=1;
				}else{  
				   idf_word[words2[k]]=1;
				}
			//}
		}	
		
		column2_tf_word.push_back(word2count_text2);
		
	}
	cout<<"build word2id end"<<endl;
	cout<<"build tf end"<<endl;
    cout<<"word2id size "<<word2id.size()<<endl;
	cout<<"id2word size "<<id2word.size()<<endl;
    map<string,float>::iterator iter3;
	for(iter3=idf_word.begin();iter3!=idf_word.end();iter3++)    
	{   
		idf_word[iter3->first]=std::log(text1fenci_column.size()*2/(iter3->second+1));
		
		if(abs(idf_word[iter3->first])>1e+5){
			cout<<idf_word[iter3->first]<<endl;
			cout<<"~~~"<<endl;
		}
	}   
	cout<<"build idf end"<<endl;
	int count = 0;
	for(int i=0;i<text1fenci_column.size();i++){
		VectorXf one_vector = VectorXf::Zero(word2id.size());
		for(int j=0;j<text1fenci_column[i].size();j++){
			string word = text1fenci_column[i][j];
			//if(is_chinese(word)){
			  float tfidf = idf_word[word]*column1_tf_word[i][word];
			  if(tfidf>1e+5){
				  tfidf=1e+5;
				  cout<<tfidf<<endl;
				  cout<<"!!!"<<endl;
		      }	
			  if(tfidf<-1e+5){
				  tfidf=-1e+5;
				  cout<<tfidf<<endl;
				  cout<<"!!!"<<endl;
		      }	
			  int id = word2id[word];
			  if(one_vector(id)>-1e+5 && one_vector(id)<1e-5){
			    one_vector(id)=tfidf;
			  }else{
				one_vector(id)=one_vector(id)+tfidf;  
			  }
			//}
	    }
		VectorXf one_vector2 = VectorXf::Zero(word2id.size());
	    for(int j=0;j<text2fenci_column[i].size();j++){
			string word2 = text2fenci_column[i][j];
			//if(is_chinese(word2)){
			  float tfidf = idf_word[word2]*column2_tf_word[i][word2];
			  if(tfidf>1e+5){
				  tfidf=1e+5;
				  cout<<tfidf<<endl;
				  cout<<"!!!"<<endl;
		      }	
			  if(tfidf<-1e+5){
				  tfidf=-1e+5;
				  cout<<tfidf<<endl;
				  cout<<"!!!"<<endl;
		      }	
			  int id = word2id[word2];
			  if(one_vector2(id)>-1e+5 && one_vector2(id)<1e-5){
			    one_vector2(id)=tfidf;
			  }else{
				one_vector2(id)=one_vector2(id)+tfidf;  
			  }
			//}
		}
		
		feature_result.tfidf_word_canberra.push_back(ScipyDistance::canberra(one_vector,one_vector2));
		feature_result.tfidf_word_chebyshev.push_back(ScipyDistance::chebyshev(one_vector,one_vector2));
		feature_result.tfidf_word_braycurtis.push_back(ScipyDistance::braycurtis(one_vector,one_vector2));
		feature_result.tfidf_word_sokalmichener.push_back(ScipyDistance::sokalmichener(one_vector,one_vector2));
		feature_result.tfidf_word_yule.push_back(ScipyDistance::yule(one_vector,one_vector2));
		
		count+=1;
		if(count%10000==0){
          cout<<count<<endl;
		}			
	}
	cout<<count<<endl;
	cout<<"build tfidf end"<<endl;
	this->read_stopword("stop_words.utf8");
	this->build_weight();
}


void DataFrame::build_weight()
{
	cout<<"word2count size "<<word2count.size()<<endl;
	cout<<"word2id size "<<word2id.size()<<endl;
	map<string,int>::iterator iter;
	for(iter=word2id.begin();iter!=word2id.end();iter++)    
	{
		weight.push_back(1);
	}
	for(iter=word2id.begin();iter!=word2id.end();iter++)    
	{
		weight[word2id[iter->first]]=iter->second;
	}
	cout<<"weight size "<<weight.size()<<endl;

}

void DataFrame::read_stopword(const char * filename)
{
	
	ifstream infile;
	infile.open(filename); 
	if(!infile.is_open())
	{
		cout << "打开文件出错" << endl;
		return;
	}
	string line;
	set<int> stopword_set;
    while(getline(infile,line))
    {
		map<string,int>::iterator search = word2id.find(line);
        if(search != word2id.end()){
		    stopword_set.insert(word2id[line]);
	    }
    }
	cout<<"read stopword rows "<<stopword_set.size()<<endl;
	map<string,int>::iterator iter;
	for(iter=word2id.begin();iter!=word2id.end();iter++)    
	{
		stopword_vector.push_back(false);
	}
	
	for (set<int>::iterator i=stopword_set.begin(); i!=stopword_set.end(); i++){
		stopword_vector[(*i)]=true;
	//	cout<<(*i)<<endl;
	}
	
    infile.close();
    cout<<"read stopword end"<<endl;
	
}

PredictTemp DataFrame::fenci_for_predict(string input1,string input2)
{
	// Segment both inputs into words (jieba Cut) and single characters
	// (Char2Token), and wrap each token stream in a one-row column so the
	// feature code can consume it like the train-time columns.
	//
	// Fix: the original built four temporary vector<vector<string>>
	// locals and then copied them member-by-member into the result; we
	// push directly into the default-constructed (empty) struct members.
	//
	// NOTE(review): unlike read_csv(), this path does NOT filter tokens
	// through is_chinese() — confirm train/predict tokenisation is meant
	// to differ.
	PredictTemp predict_temp;

	vector<string> words1;
	vector<string> chars1;
	this->get_jieba()->Cut(input1.c_str(), words1, false);
	Char2Token(input1.c_str(), chars1);
	predict_temp.text1fenci_column_predict.push_back(words1);
	predict_temp.text1fenzi_column_predict.push_back(chars1);

	vector<string> words2;
	vector<string> chars2;
	this->get_jieba()->Cut(input2.c_str(), words2, false);
	Char2Token(input2.c_str(), chars2);
	predict_temp.text2fenci_column_predict.push_back(words2);
	predict_temp.text2fenzi_column_predict.push_back(chars2);

	return predict_temp;
}

TrainTemp DataFrame::read_csv(const char * filename)
{   
    TrainTemp train_temp;
	vector<vector<string> > text1fenci_column;
	vector<vector<string> > text1fenzi_column;
	vector<vector<string> > text2fenci_column;
	vector<vector<string> > text2fenzi_column;
    ifstream infile;
	infile.open(filename); 
	if(!infile.is_open())
	{
		cout << "打开文件出错" << endl;
	}
	string line;
    while(getline(infile,line))
    { 
        vector<string> split_result;
        split_string(line,"\t",split_result);
		text1_column.push_back(split_result[0]);
		text2_column.push_back(split_result[1]);
		label_column.push_back(atoi(split_result[2].c_str()));
		
		vector<string> chars1;
		vector<string> words1;
		this->get_jieba()->Cut(split_result[0].c_str(), words1, false);
	    Char2Token(split_result[0].c_str(),chars1);
		
		vector<string> chars1_;
		vector<string> words1_;
		for(int i=0;i<chars1.size();i++){
			if(is_chinese(chars1[i])){
				chars1_.push_back(chars1[i]);
			}
		}
		for(int i=0;i<words1.size();i++){
			if(is_chinese(words1[i])){
				words1_.push_back(words1[i]);
			}
		}
				
		text1fenci_column.push_back(words1_);
		text1fenzi_column.push_back(chars1_);
		vector<string> chars2;
		vector<string> words2;
		this->get_jieba()->Cut(split_result[1].c_str(), words2, false);
	    Char2Token(split_result[1].c_str(),chars2);
		
		vector<string> chars2_;
		vector<string> words2_;
		for(int i=0;i<chars2.size();i++){
			if(is_chinese(chars2[i])){
				chars2_.push_back(chars2[i]);
			}
		}
		for(int i=0;i<words2.size();i++){
			if(is_chinese(words2[i])){
				words2_.push_back(words2[i]);
			}
		}
		
		text2fenci_column.push_back(words2_);
		text2fenzi_column.push_back(chars2_);
		
    }
	cout<<"read total rows "<<label_column.size()<<endl;
    infile.close();
    cout<<"read csv & fenci end"<<endl;
	train_temp.text1fenci_column = text1fenci_column;
	train_temp.text1fenzi_column = text1fenzi_column;
	train_temp.text2fenci_column = text2fenci_column;
	train_temp.text2fenzi_column = text2fenzi_column;
	return train_temp;
}

}