/**
 * @author phoenics@126.com
 * @date 2017-10-09 14:08:37
 * @version V1.0
 */

package phoenics.nlp.ml.classify;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.common.Term;

/**
 * Naive-Bayes-style short-sentence classifier backed by HanLP word
 * segmentation. Per-word statistics are persisted as "word : probability"
 * lines in two plain-text training files ("hold.tra" and "expellent.tra")
 * under a configurable directory.
 */
public class ShortSentence {
	private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ShortSentence.class);
	// Directory containing the training files "hold.tra" and "expellent.tra".
	private String triningFilePath = null;
	// Lazily built word -> probability map; stays null until trySen() triggers initRateMap().
	Map<String, Double> sampleRateMap = null;

	public ShortSentence() {}

	/**
	 * @param path directory containing the training files
	 */
	public ShortSentence(String path) {
		triningFilePath = path;
	}

	/**
	 * Builds {@link #sampleRateMap} from the two training files.
	 * Leaves it null (so {@link #trySen(String)} answers false) when either
	 * file cannot be read — the original code threw a NullPointerException here.
	 */
	private void initRateMap() {
		Map<String, Double> readHold = readHoldTraining(this.triningFilePath);
		Map<String, Double> readExp = readExpTraining(this.triningFilePath);
		if (readHold == null || readExp == null) {
			logger.warn("training data missing or unreadable under {}", this.triningFilePath);
			return;
		}
		this.sampleRateMap = this.createProbabilityMap(readHold, readExp);
	}

	/**
	 * Scores a sentence against a word-probability map (naive-Bayes combination).
	 *
	 * @param sentence text to segment with HanLP and score
	 * @param ratemap  word -> probability of belonging to the "hold" class
	 * @return combined probability in (0, 1); exactly 0.5 when no segmented
	 *         word of the sentence occurs in {@code ratemap}
	 */
	public double proSentence(String sentence, Map<String, Double> ratemap) {
		double rate = 1.0;      // running product of P(word)
		double tempRate = 1.0;  // running product of 1 - P(word)
		List<Term> terms = HanLP.newSegment().seg(sentence);
		for (Term term : terms) {
			Double tmp = ratemap.get(term.word); // single lookup instead of containsKey + get
			if (tmp != null) {
				tempRate *= 1 - tmp;
				rate *= tmp;
			}
		}
		return rate / (rate + tempRate);
	}

	/**
	 * Formats one "word : probability" line for the training files.
	 * NOTE: " : " is also the parse delimiter used by readTraining().
	 */
	private String takeTuple(String key, Double data) {
		return key + " : " + data + "\n";
	}

	/**
	 * Appends the word-frequency map of {@code sentence} to {@code path/filename}.
	 * I/O errors are logged and swallowed, preserving the original best-effort
	 * contract; the original additionally leaked the writer when write() threw.
	 */
	private void saveTraining(String sentence, String path, String filename) {
		Map<String, Double> createSampleMapStr = createSampleMap(sentence);
		String tmp = createSampleMapStr.entrySet().stream()
				.map(e -> this.takeTuple(e.getKey(), e.getValue()))
				.collect(Collectors.joining());
		// try-with-resources guarantees the writer is closed even on failure.
		// FileWriter (platform charset, append mode) kept for compatibility with
		// existing training files.
		try (BufferedWriter out = new BufferedWriter(new FileWriter(path + "/" + filename, true))) {
			out.write(tmp);
		} catch (IOException e) {
			logger.error("failed to append training data to {}/{}", path, filename, e);
		}
	}

	/**
	 * Reads a "word : probability" training file into a map.
	 * Duplicate words keep the last occurrence, matching append-mode saves.
	 *
	 * @return the parsed map, or null when the file is missing or unreadable
	 */
	private Map<String, Double> readTraining(String path, String filename) {
		// try-with-resources fixes the reader leak the original hid behind
		// @SuppressWarnings("resource").
		try (BufferedReader in = new BufferedReader(new FileReader(path + "/" + filename))) {
			return in.lines().collect(Collectors.toMap(
					line -> line.split(" : ")[0],
					line -> Double.parseDouble(line.split(" : ")[1]),
					(first, second) -> second));
		} catch (IOException e) {
			logger.error("failed to read training file {}/{}", path, filename, e);
			return null;
		}
	}

	/** Appends {@code sentence} statistics to "hold.tra" under the configured path; no-op when unset. */
	public void saveHoldTraining(String sentence) {
		if (this.triningFilePath == null) {
			return;
		}
		saveHoldTraining(sentence, this.triningFilePath);
	}

	/** Appends {@code sentence} statistics to "hold.tra" under {@code path}. */
	public void saveHoldTraining(String sentence, String path) {
		saveTraining(sentence, path, "hold.tra");
	}

	/** Appends {@code sentence} statistics to "expellent.tra" under the configured path; no-op when unset. */
	public void saveExpTraining(String sentence) {
		if (this.triningFilePath == null) {
			return;
		}
		saveExpTraining(sentence, this.triningFilePath);
	}

	/** Appends {@code sentence} statistics to "expellent.tra" under {@code path}. */
	public void saveExpTraining(String sentence, String path) {
		saveTraining(sentence, path, "expellent.tra");
	}

	/** @return word probabilities read from "hold.tra", or null when unreadable */
	public Map<String, Double> readHoldTraining(String path) {
		return readTraining(path, "hold.tra");
	}

	/** @return word probabilities read from "expellent.tra", or null when unreadable */
	public Map<String, Double> readExpTraining(String path) {
		return readTraining(path, "expellent.tra");
	}

	/**
	 * Segments {@code sentence} with HanLP and maps each distinct word to its
	 * relative frequency (count / total terms).
	 *
	 * @return a LinkedHashMap of word -> frequency; empty for an empty segmentation
	 */
	public Map<String, Double> createSampleMap(String sentence) {
		List<Term> terms = HanLP.newSegment().seg(sentence);
		Map<String, Integer> tmpmap = terms.stream()
				.collect(Collectors.groupingBy(term -> term.word, Collectors.summingInt(p -> 1)));
		final double size = terms.size();
		// No division by zero: an empty segmentation yields an empty count map.
		return tmpmap.entrySet().stream().collect(Collectors.toMap(
				Map.Entry::getKey,
				e -> e.getValue() / size,
				(first, second) -> second,
				LinkedHashMap::new));
	}

	/**
	 * Classifies a sentence against the lazily loaded training data.
	 *
	 * @return true when the combined probability exceeds 0.5; false when no
	 *         training path is configured or the training data cannot be loaded
	 */
	public boolean trySen(String sentence) {
		if (this.triningFilePath == null) {
			return false;
		}
		if (this.sampleRateMap == null) {
			initRateMap();
		}
		if (this.sampleRateMap == null) {
			return false;
		}
		return this.proSentence(sentence, this.sampleRateMap) > 0.5D;
	}

	/**
	 * For every word of {@code spammap} computes
	 * P(word|spam) / (P(word|spam) + P(word|ok)).
	 * Words occurring only in {@code okmap} are ignored, as in the original.
	 */
	public Map<String, Double> createProbabilityMap(Map<String, Double> spammap, Map<String, Double> okmap) {
		Map<String, Double> retmap = new HashMap<>();
		for (Map.Entry<String, Double> entry : spammap.entrySet()) {
			double rate = entry.getValue();
			double allRate = rate + okmap.getOrDefault(entry.getKey(), 0.0);
			retmap.put(entry.getKey(), rate / allRate);
		}
		return retmap;
	}

	/**
	 * @return the training-file directory, or null when unset
	 */
	public String getTriningFilePath() {
		return triningFilePath;
	}

	/**
	 * @param triningFilePath the training-file directory to set
	 */
	public void setTriningFilePath(String triningFilePath) {
		this.triningFilePath = triningFilePath;
	}

}
