/**
 * @author phoenics@126.com
 * @date 2017年11月7日 上午11:31:46
 * @version V1.0
 */

package com.jx.gocom.nlp.classify;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;

/**
 *
 */
public class Bayes {
	private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Bayes.class);
	// Per-class word frequency table: class name -> (word -> occurrence count).
	private final Map<String, Map<String, Long>> classWordMap = new ConcurrentHashMap<>();
	// Per-class document count: class name -> number of training documents for that class.
	final Map<String, Long> classDocCountMap = new ConcurrentHashMap<>();
	// Known class labels.
	private final List<String> classifies = new CopyOnWriteArrayList<>();
	// Total number of training documents across all classes.
	private final AtomicInteger actCount = new AtomicInteger(0);

	/**
	 * Reloads the model data (per-class dictionaries and document counts).
	 * Not yet implemented.
	 */
	public synchronized void reload() {
		// read data
	}

	/**
	 * Scores {@code text} against every known class using a naive-Bayes-style
	 * estimate: P(class|text) is proportional to P(text|class) * P(class). The
	 * evidence term P(text) is constant across classes and therefore omitted —
	 * scores are comparable to each other but are not normalized probabilities.
	 *
	 * @param text the document to classify; may be {@code null} or empty
	 * @return map from class name to unnormalized score; empty when {@code text}
	 *         is null/empty or the model holds no documents
	 */
	public Map<String, Double> classify(String text) {
		Map<String, Double> frequencyOfType = new HashMap<>();
		// Guard: nothing to segment, or an unloaded model would divide by zero below.
		if (text == null || text.isEmpty() || actCount.get() == 0) {
			return frequencyOfType;
		}
		Segment seg = HanLP.newSegment();
		List<Term> terms = seg.seg(text);
		// Distinct words only: presence, not multiplicity, drives the score.
		List<String> wordList = terms.stream().map(t -> t.word).distinct().collect(Collectors.toList());
		for (String classKey : this.classifies) {
			Map<String, Long> wordMap = classWordMap.get(classKey);
			Long articleCount = classDocCountMap.get(classKey);
			// Skip classes whose data is missing or empty instead of throwing NPE
			// (the three model structures may drift out of sync during loading).
			if (wordMap == null || articleCount == null || articleCount == 0L) {
				continue;
			}
			double typeOfThis = 1.0;
			for (String word : wordList) {
				// Unseen words fall back to 0 occurrences; the +1 below is the
				// add-one (Laplace-style) floor so the product never hits zero.
				long wordCount = wordMap.getOrDefault(word, 0L);
				// BUGFIX: the original computed the long division wordCount / articleCount
				// BEFORE adding 1 and casting, truncating the ratio to 0 for almost every
				// word. Divide in floating point so the frequency actually contributes.
				double termFrequency = (double) wordCount / articleCount + 1;
				// Scale by 10 each step to delay underflow of the running product.
				typeOfThis = typeOfThis * termFrequency * 10;
				if (typeOfThis == 0.0) {
					typeOfThis = Double.MIN_VALUE; // clamp hard underflow
				}
			}
			// Product never moved off its seed value => no words scored; report 0.
			typeOfThis = ((typeOfThis == 1.0) ? 0.0 : typeOfThis);
			// Prior probability of this class among all training documents.
			double classOfAll = articleCount.doubleValue() / actCount.doubleValue();
			// Bayes rule S(A|B) = S(B|A)*S(A)/S(B); S(B) is constant and omitted,
			// which does not affect the relative ranking of classes.
			frequencyOfType.put(classKey, typeOfThis * classOfAll);
		}
		return frequencyOfType;
	}

}
