package com.yx.crawler.analyse;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

/**
 * Utility class for Chinese word segmentation and TF-IDF based keyword
 * extraction, built on the IK Analyzer ({@link IKSegmenter}).
 *
 * <p>All functionality is exposed through static methods; the class holds no
 * state and is therefore thread-safe.
 */
public class ChineseWordSpliter {

	public ChineseWordSpliter() {
		// Kept public for source compatibility with existing callers;
		// the class only exposes static methods and needs no state.
	}

	/**
	 * Segments {@code content}, scores every distinct term by TF-IDF, and
	 * returns the highest-scoring terms whose cumulative score covers the
	 * requested fraction of the total TF-IDF mass.
	 *
	 * @param content      the text to segment
	 * @param idf          provider of inverse-document-frequency values per term
	 * @param totalPercent fraction (expected 0..1) of the summed TF-IDF mass
	 *                     to retain — TODO confirm expected range with callers
	 * @return map of selected terms to their TF-IDF score, ordered by
	 *         descending score; empty when {@code content} yields no terms
	 */
	public static LinkedHashMap<String, Double> tf_idfSplit(String content, IdfManager idf, Double totalPercent) {
		Reader input = new StringReader(content);
		// useSmart=true: IK's smart segmentation mode (coarser granularity,
		// fewer overlapping terms). NOTE(review): the original comment claimed
		// smart mode was disabled, which contradicted this flag.
		IKSegmenter iks = new IKSegmenter(input, true);
		LinkedHashMap<String, Double> counts = new LinkedHashMap<String, Double>();
		int size = 0;
		try {
			Lexeme lexeme;
			while ((lexeme = iks.next()) != null) {
				// Raw term frequency per distinct lexeme.
				counts.merge(lexeme.getLexemeText(), 1.0, Double::sum);
				size++;
			}
		} catch (IOException e) {
			// StringReader cannot actually fail; surface instead of swallowing.
			throw new IllegalStateException("Failed to segment content", e);
		}
		if (size == 0) {
			// Empty/whitespace input: avoid division by zero (NaN scores) below.
			return new LinkedHashMap<String, Double>();
		}
		// Convert raw counts to TF-IDF scores. Terms the IDF model does not
		// know (score <= 0) are dropped entirely — previously their raw counts
		// leaked into the result map and skewed the ranking.
		LinkedHashMap<String, Double> scores = new LinkedHashMap<String, Double>();
		double sumTfIdf = 0;
		for (Map.Entry<String, Double> entry : counts.entrySet()) {
			double tfIdf = (entry.getValue() / size) * idf.getIdf(entry.getKey());
			if (tfIdf <= 0) {
				continue;
			}
			sumTfIdf += tfIdf;
			scores.put(entry.getKey(), tfIdf);
		}
		return getMostFrequentWords(sumTfIdf * totalPercent, scores);
	}

	/**
	 * Segments {@code content} and returns its distinct terms in first-seen
	 * order.
	 *
	 * @param content the text to segment
	 * @return insertion-ordered set of distinct segmented terms
	 */
	public static Set<String> getKeywords(String content) {
		Reader input = new StringReader(content);
		// useSmart=true: IK's smart segmentation mode (see tf_idfSplit).
		IKSegmenter iks = new IKSegmenter(input, true);
		// A set expresses the intent directly; the original used a map with
		// dummy values just for its key set.
		Set<String> words = new LinkedHashSet<String>();
		try {
			Lexeme lexeme;
			while ((lexeme = iks.next()) != null) {
				words.add(lexeme.getLexemeText());
			}
		} catch (IOException e) {
			// StringReader cannot actually fail; surface instead of swallowing.
			throw new IllegalStateException("Failed to segment content", e);
		}
		return words;
	}

	/**
	 * Sorts {@code words} by descending score and returns the leading entries
	 * accumulated while the running score total has not yet exceeded
	 * {@code totalSumTfIdf}. At least one entry is returned when the input is
	 * non-empty and the threshold is non-negative.
	 *
	 * @param totalSumTfIdf cumulative-score threshold controlling how many
	 *                      entries are kept
	 * @param words         term-to-score map to rank
	 * @return the selected entries, ordered by descending score
	 */
	public static LinkedHashMap<String, Double> getMostFrequentWords(Double totalSumTfIdf, Map<String, Double> words) {
		List<Map.Entry<String, Double>> entries = new ArrayList<Map.Entry<String, Double>>(words.entrySet());
		// Highest score first.
		Collections.sort(entries, new Comparator<Map.Entry<String, Double>>() {
			@Override
			public int compare(Map.Entry<String, Double> a, Map.Entry<String, Double> b) {
				return b.getValue().compareTo(a.getValue());
			}
		});
		LinkedHashMap<String, Double> keywords = new LinkedHashMap<String, Double>();
		double sum = 0;
		for (Map.Entry<String, Double> entry : entries) {
			if (sum > totalSumTfIdf) {
				break; // the cumulative mass has reached the requested share
			}
			sum += entry.getValue();
			keywords.put(entry.getKey(), entry.getValue());
		}
		return keywords;
	}

	public static void main(String[] args) throws Exception {
		// Smoke test: segment a sample sentence and print the distinct terms.
		Set<String> words = getKeywords("一位成都的乌克兰美女偶然得知小区周围工地上干活的农民工因不会用网络，每年都买不到回家的火车票，就向中国朋友请教学会了如何在网上买火车票，并热心帮农民工抢春节回家的火车票，被大家称为 \" 成都最美老外 \"。网友：好想去美女小区的工地上搬砖。");
		for (String key : words) {
			System.out.println(key);
		}
	}
}
