/**
 * @author phoenics@126.com
 * @date 2017年11月7日 上午10:12:42
 * @version V1.0
 */

package com.jx.gocom.nlp.classify;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;

/**
 * Accumulates per-class word frequencies and per-class document counts from
 * HanLP-segmented text, as training statistics for a naive Bayes classifier.
 */
public class BayesTrain {
	private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BayesTrain.class);
	// Per-class word table: class id -> (word -> weight). Not yet populated here.
    private Map<String, Map<String, Double>> classWordMap = new ConcurrentHashMap<>();
    // Class id -> human-readable class name.
    private Map<String, String> classMap = new HashMap<>();
    // Class id -> number of training documents of that class.
    private Map<String, Integer> classP = new ConcurrentHashMap<>();
    // Total number of training documents processed.
    private AtomicInteger actCount = new AtomicInteger(0);

    /**
     * Trains on a single raw text.
     *
     * <p>Not implemented yet; the stub is kept so existing callers keep compiling.
     *
     * @param text raw document text
     */
    public void realTrain(String text) {
        // TODO: implement single-document training.
    }

    /**
     * Trains on a batch of labelled documents.
     *
     * <p>Each entry of {@code files} is a two-element array: index 0 is the class
     * label and index 1 is the document content. Each document is segmented with
     * HanLP, then per-class word occurrence totals and per-class document counts
     * are accumulated into local maps (persistence is still a TODO, see the
     * "save" marker below).
     *
     * @param files list of {class label, document content} pairs
     */
    public void realTrain(List<String[]> files) {
        Segment seg = HanLP.newSegment();
        // class label -> (word -> occurrence count summed over all docs of that class)
        Map<String, Map<String, Long>> classWordCountMap = new HashMap<>();
        // class label -> number of documents seen for that class
        Map<String, Long> classDocCountMap = new HashMap<>();
        for (String[] file : files) {
            String fileClassify = file[0];
            String content = file[1];
            List<Term> terms = seg.seg(content);
            // Occurrence count of each word within this one document.
            Map<String, Long> wordCount = terms.stream()
                    .map(t -> t.word)
                    .collect(Collectors.groupingBy(word -> word, Collectors.counting()));
            // FIX: the original stored 0 for the first document of a class and only
            // incremented afterwards, leaving every class count one short. merge()
            // counts each document exactly once (and avoids deprecated new Long()).
            classDocCountMap.merge(fileClassify, 1L, Long::sum);
            // Fold this document's counts into the running per-class totals.
            Map<String, Long> classTotals =
                    classWordCountMap.computeIfAbsent(fileClassify, k -> new HashMap<>());
            for (Map.Entry<String, Long> entry : wordCount.entrySet()) {
                classTotals.merge(entry.getKey(), entry.getValue(), Long::sum);
            }
            // FIX: actCount is the overall document counter but was never updated.
            actCount.incrementAndGet();
        }
        //save ==========================================
    }

}
