package com.samp.algorithm.textrank;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import com.hankcs.hanlp.dictionary.stopword.CoreStopWordDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.tokenizer.StandardTokenizer;
import com.samp.base.file.FileUtilsDemo;
import com.samp.util.FilesUtils;

/**
 * TextRank keyword extraction demo built on the HanLP segmenter.
 *
 * <p>Words that co-occur within a sliding window of {@link #WINDOW_SIZE}
 * filtered tokens are linked in an undirected graph; a PageRank-style
 * iteration then scores each word by the scores of its neighbours.
 */
public class HanLPDemo {

    /** Damping factor of the PageRank iteration. */
    final static float d = 0.85f;
    /**
     * Maximum number of iterations (public and mutable so callers can tune it).
     */
    public static int max_iter = 200;
    /** Convergence threshold: stop once no score moves by more than this. */
    final static float min_diff = 0.001f;
    /** Co-occurrence window: a word is linked to up to this many predecessors. */
    final static int WINDOW_SIZE = 5;

    /**
     * Runs TextRank over a segmented term list and returns a score for every
     * retained word.
     *
     * @param termList segmentation result; terms rejected by
     *                 {@link #shouldInclude(Term)} are ignored
     * @return map from word to TextRank score (higher means more important)
     */
    public Map<String, Float> getRank(List<Term> termList)
    {
        List<String> wordList = new ArrayList<String>(termList.size());
        for (Term t : termList)
        {
            if (shouldInclude(t))
            {
                wordList.add(t.word);
            }
        }

        // Build the undirected co-occurrence graph: each word is linked to the
        // (up to WINDOW_SIZE) retained words immediately preceding it.
        Map<String, Set<String>> words = new TreeMap<String, Set<String>>();
        Queue<String> window = new LinkedList<String>();
        for (String w : wordList)
        {
            Set<String> neighbours = words.get(w);
            if (neighbours == null)
            {
                neighbours = new TreeSet<String>();
                words.put(w, neighbours);
            }
            if (window.size() >= WINDOW_SIZE)
            {
                window.poll();
            }
            for (String qWord : window)
            {
                if (w.equals(qWord))
                {
                    continue;
                }
                // The neighbour relation is symmetric, so a single pass over the
                // window links both directions.
                neighbours.add(qWord);
                words.get(qWord).add(w);
            }
            window.offer(w);
        }

        // PageRank iteration: score(w) = (1 - d) + d * sum(score(n) / outDegree(n)).
        Map<String, Float> score = new HashMap<String, Float>();
        for (int i = 0; i < max_iter; ++i)
        {
            Map<String, Float> m = new HashMap<String, Float>();
            float max_diff = 0;
            for (Map.Entry<String, Set<String>> entry : words.entrySet())
            {
                String key = entry.getKey();
                // Accumulate locally instead of repeated m.get/m.put per neighbour.
                float rank = 1 - d;
                for (String element : entry.getValue())
                {
                    int size = words.get(element).size();
                    if (key.equals(element) || size == 0) continue;
                    Float neighbourScore = score.get(element);
                    rank += d / size * (neighbourScore == null ? 0 : neighbourScore);
                }
                m.put(key, rank);
                Float previous = score.get(key);
                max_diff = Math.max(max_diff, Math.abs(rank - (previous == null ? 0 : previous)));
            }
            score = m;
            if (max_diff <= min_diff) break; // converged early
        }

        return score;
    }

    /**
     * Decides whether a term takes part in ranking. Terms whose HanLP nature
     * (part-of-speech) tag starts with a function-word category letter are
     * dropped, as are single-character words and stop words.
     *
     * @param term a segmented term carrying word text and a nature tag
     * @return {@code true} if the term should be ranked
     */
    public boolean shouldInclude(Term term)
    {
        if (term.nature == null) return false; // untagged term: exclude
        String nature = term.nature.toString();
        char firstChar = nature.charAt(0);
        switch (firstChar)
        {
            // Function-word categories (numerals, conjunctions, particles,
            // pronouns, punctuation, ...) carry no keyword value.
            case 'm':
            case 'b':
            case 'c':
            case 'e':
            case 'o':
            case 'p':
            case 'q':
            case 'u':
            case 'y':
            case 'z':
            case 'r':
            case 'w':
                return false;
            default:
                // Keep multi-character content words that are not stop words.
                return term.word.trim().length() > 1
                        && !CoreStopWordDictionary.contains(term.word);
        }
    }

    /**
     * Demo entry point: segments a local text file with the standard HanLP
     * tokenizer and prints the TextRank score map.
     */
    public static void main(String[] args) {
        HanLPDemo demo = new HanLPDemo();
        Segment defaultSegment = StandardTokenizer.SEGMENT;
        // NOTE(review): hard-coded Windows path; adjust to your environment.
        String filePath = "D:\\temp\\docs\\002.txt";
        String fileContent = FilesUtils.getTextFromPath(filePath);
        List<Term> termList = defaultSegment.seg(fileContent);
        Map<String, Float> map = demo.getRank(termList);
        System.out.println(map);
    }

}
