package stock;

  

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.corpus.occurrence.Occurrence;
import com.hankcs.hanlp.corpus.occurrence.PairFrequency;
import com.hankcs.hanlp.corpus.occurrence.TermFrequency;
import com.hankcs.hanlp.corpus.occurrence.TriaFrequency;
import com.hankcs.hanlp.seg.Other.AhoCorasickDoubleArrayTrieSegment;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.tokenizer.BasicTokenizer;

/**
 * @Author：sks
 * @Description：
 * @Date：Created in 16:00 2018/1/19
 * @Modified by：
 **/
/**
 * Demo driver exercising HanLP features: co-occurrence (unigram) frequency
 * statistics over a comma-separated list of stock-concept tags, and
 * Aho-Corasick double-array-trie segmentation using the first configured
 * custom dictionary. All methods print to stdout; nothing is returned.
 *
 * @Author：sks
 * @Description：
 * @Date：Created in 16:00 2018/1/19
 * @Modified by：
 **/
public class hanlp_test {

    /**
     * Entry point: computes unigram term frequencies over a hard-coded tag
     * string, then segments the same string with an Aho-Corasick segment
     * built from {@code HanLP.Config.CustomDictionaryPath[0]}.
     */
    public static void main(String[] args) {
        String all = "电商概念,电商概念,文化传媒,湖南板块,知识产权,破净股,富时罗素,深股通,融资融券,短剧互动游戏,短剧互动游戏,短剧互动游戏,短剧互动游戏,广电,华为概念,影视概念,5G概念,IPO受益,电商概念,在线教育,手游概念,网络游戏,创投,文化传媒,浙江板块,预盈预增,机构重仓,短剧互动游戏,虚拟数字人,字节概念,影视概念,知识产权,网红直播,人工智能,国企改革,行业 文化传媒,山东板块,创业板综,机构重仓,QFII重仓,人工智能,高送转,在线教育,";

        // Accumulate the text and compute co-occurrence statistics, then
        // dump every unigram term with its frequency.
        Occurrence occurrence = new Occurrence();
        occurrence.addAll(all);
        occurrence.compute();

        Set<Map.Entry<String, TermFrequency>> uniGram = occurrence.getUniGram();
        for (Map.Entry<String, TermFrequency> entry : uniGram) {
            System.out.println(entry.getValue());
        }

        // Build the segmenter from the first custom-dictionary path.
        AhoCorasickDoubleArrayTrieSegment segment = null;
        try {
            System.out.println(HanLP.Config.CustomDictionaryPath[0]);
            segment = new AhoCorasickDoubleArrayTrieSegment(HanLP.Config.CustomDictionaryPath[0]);
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println();

        // FIX: previously segment.seg(all) ran unconditionally, throwing a
        // NullPointerException whenever dictionary loading failed above.
        if (segment != null) {
            // FIX: typed for-each replaces the raw Iterator + explicit cast.
            List<Term> terms = segment.seg(all);
            for (Term term : terms) {
                System.out.println(term.word);
            }
        }
    }

    /**
     * First-run demo: triggers HanLP's dictionary-cache construction by
     * segmenting a greeting sentence, then runs the tokenizer benchmark.
     */
    public static void demoAtFirstSight() {
        System.out.println("首次编译运行时，HanLP会自动构建词典缓存，请稍候……");
        System.out.println(HanLP.segment("你好，欢迎使用HanLP汉语处理包！接下来请从其他Demo中体验HanLP丰富的功能~"));
        demoBasicTokenizer();
    }

    /**
     * Segments a sample paragraph with {@link BasicTokenizer} and then
     * benchmarks segmentation throughput (characters per second) over
     * 100,000 repetitions.
     */
    public static void demoBasicTokenizer() {
        String text = "举办纪念活动铭记二战历史，不忘战争带给人类的深重灾难，是为了防止悲剧重演，确保和平永驻；" +
                "铭记二战历史，更是为了提醒国际社会，需要共同捍卫二战胜利成果和国际公平正义，" +
                "必须警惕和抵制在历史认知和维护战后国际秩序问题上的倒行逆施。";
        System.out.println(BasicTokenizer.segment(text));

        // Benchmark: repeat segmentation to measure raw throughput.
        long start = System.currentTimeMillis();
        int pressure = 100000;
        for (int i = 0; i < pressure; ++i) {
            BasicTokenizer.segment(text);
        }
        double costTime = (System.currentTimeMillis() - start) / (double) 1000;
        // FIX: widen to long before multiplying to avoid int overflow for
        // large text.length() * pressure products.
        System.out.printf("BasicTokenizer分词速度：%.2f字每秒\n", (long) text.length() * pressure / costTime);
    }

    /**
     * Standalone co-occurrence demo over a second hard-coded tag string;
     * prints every unigram term frequency.
     */
    public static void DemoOccurrence() {
        Occurrence occurrence = new Occurrence();
        occurrence.addAll("行业 文化传媒,地区 湖南板块,破净股,富时罗素,深股通,融资融券,短剧互动游戏,短剧互动游戏短剧互动游戏短剧互动游戏广电,华为概念,影视概念,5G概念,IPO受益,电商概念,在线教育,手游概念,网络游戏,创投,行业 文化传媒,地区 浙江板块,预盈预增,机构重仓,短剧互动游戏,虚拟数字人,字节概念,影视概念,知识产权,网红直播,人工智能,国企改革,行业 文化传媒,地区 山东板块,创业板综,机构重仓,QFII重仓,人工智能,高送转,在线教育,");
        occurrence.compute();

        Set<Map.Entry<String, TermFrequency>> uniGram = occurrence.getUniGram();
        for (Map.Entry<String, TermFrequency> entry : uniGram) {
            System.out.println(entry.getValue());
        }
    }
}