package com.mijie.homi.test;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.wltea.analyzer.lucene.IKAnalyzer;
import org.wltea.analyzer.lucene.NatureAttribute;

/**
 * Naive-Bayes-style text classifier over a fixed set of Chinese topic labels.
 * Training data is loaded from a local corpus file at construction time
 * (see loadTFCounter); tokenization is done with the IK analyzer.
 */
public class NaiveBayesianClassifier {

	// token -> (type id -> per-type word statistics), built from the training corpus
	private Map<String,Map<Integer,WordStat>> tfCounter = null;
	// type id -> number of (document, label) pairs carrying that type
	private Map<Integer,Integer> docsWithType = new HashMap<Integer,Integer>();
	// type id -> number of distinct tokens observed under that type
	// NOTE(review): populated in the constructor but never read here — confirm before removing
	private Map<Integer,Integer> typeFreq = new HashMap<Integer,Integer>();
	// type id -> total token occurrences counted for that type
	private Map<Integer,Integer> wordsWithType = new HashMap<Integer,Integer>();
	// IK analyzer; the boolean presumably selects smart segmentation — confirm against IK docs
	private IKAnalyzer analyzer = new IKAnalyzer(true);
	private int amountOfDoc = 0;   // total (document, label) pairs seen during training
	private int amountOfWord = 0;  // total token occurrences seen during training
	// Display names indexed by type id; slot 0 is an unused placeholder
	private static String[] typeName = {"","恋爱男女","婚姻家庭","夫妻关系","婆媳关系","长辈代沟","晚辈代沟","职场烦恼","学习烦恼","朋友关系","事业前途","自身烦恼","社会现象"};
	
	/**
	 * Demo entry point: trains the classifier from the corpus file, then
	 * classifies either the first command-line argument or a built-in sample.
	 *
	 * @param args optional; args[0], when present, is the text to classify
	 * @throws IOException if the training corpus cannot be read
	 */
	public static void main(String[] args) throws IOException{
		NaiveBayesianClassifier ct = new NaiveBayesianClassifier();
		// Generalized: allow the text under test to be supplied on the command line
		// instead of only the hard-coded sample sentence.
		String line = (args.length > 0)
				? args[0]
				: "你会不会忽然的出现 ，在街角的咖啡店 ，我会带着笑脸，和你寒暄 ，不去说从前，只是寒暄 ，对你说一句，只是说一句 ，好久不见...";
		long s = System.currentTimeMillis();
		int type = ct.classify(line);
		long e = System.currentTimeMillis();
		System.out.println(line);
		// type 0 is the "no decision" sentinel (typeName[0] is a placeholder)
		if(type==0){
			System.err.println("classifier fail.");
		}else{
			System.err.println("type["+type+"]=>"+typeName[type]+",spend "+(e-s)+" millis.");
		}
	}
	
	/**
	 * Builds the term-frequency table from the training corpus, then derives
	 * typeFreq: for each type id, the count of distinct tokens seen under it.
	 *
	 * @throws IOException if the training corpus file cannot be read
	 */
	public NaiveBayesianClassifier() throws IOException{
		tfCounter = loadTFCounter();
		for(Map<Integer,WordStat> perTypeStats : tfCounter.values()){
			for(Integer type : perTypeStats.keySet()){
				Integer seen = typeFreq.get(type);
				typeFreq.put(type, seen==null ? 1 : seen+1);
			}
		}
	}
	
	/**
	 * For every token known to the training table, bumps a counter for each
	 * type that token has been observed under.
	 *
	 * @param tokens candidate tokens (unknown tokens are skipped)
	 * @return type id -> number of input tokens seen under that type
	 */
	private Map<Integer,Integer> getPossibleTypes(Collection<String> tokens){
		Map<Integer,Integer> hits = new HashMap<Integer,Integer>();
		for(String tok : tokens){
			Map<Integer,WordStat> perType = tfCounter.get(tok);
			if(perType==null){
				continue; // token never seen during training
			}
			for(Integer type : perType.keySet()){
				Integer seen = hits.get(type);
				hits.put(type, seen==null ? 1 : seen+1);
			}
		}
		return hits;
	}
	
	
	/**
	 * @return the number of real types; slot 0 of typeName is a placeholder,
	 *         so the usable count is the array length minus one
	 */
	private int getTypeSize(){
		int slots = typeName.length;
		return slots - 1;
	}
	
	/**
	 * Classifies a line of text with a naive-Bayes-style score: per token, a
	 * per-type weight is computed, below-average types are pruned (unless the
	 * token is "rare", i.e. seen under few types, in which case it is boosted),
	 * and the surviving weights are multiplied per type. The highest product
	 * wins.
	 *
	 * @param line raw text to classify
	 * @return type id (index into typeName), or 0 when no decision is made
	 * @throws IOException if tokenization fails
	 */
	public  Integer classify(String line) throws IOException{
		List<String> tokens = getTokenForClassifyTraining(line);
		Map<Integer,Double> probabilityMap = new HashMap<Integer,Double>();
		int unknowCount = 0;
		// Loop-invariant: a token is "rare" when it appears under at most 30%
		// of the types (previously recomputed inside the inner loop).
		int baselineOfRare = (int) Math.floor(getTypeSize()*0.3);
		for(String token : tokens){
			Map<Integer,WordStat> counter = tfCounter.get(token);
			if(counter==null){
				System.err.println(token+" first meet.");
				unknowCount++;
				continue;
			}

			// FIX: reset the sum for every token. The old code accumulated
			// sumOfProbability across tokens, so avgOfProbability mixed earlier
			// tokens' weights into the current token's average, skewing the
			// below-average pruning for every token after the first.
			double sumOfProbability = 0d;
			for(Integer type : counter.keySet()){
				WordStat stat = counter.get(type);
				sumOfProbability += (double)stat.freq()/((double)docsWithType.get(type)/amountOfDoc*wordsWithType.get(type));
			}
			double avgOfProbability = sumOfProbability/counter.keySet().size();

			for(Integer type : counter.keySet()){
				WordStat stat = counter.get(type);
				double probabilityOfWordForType = (double)stat.freq()/((double)docsWithType.get(type)/amountOfDoc*wordsWithType.get(type));

				if(counter.keySet().size()<=baselineOfRare){
					// Rare token: boost its weight by 50% in every type it has.
					probabilityOfWordForType += probabilityOfWordForType * 0.5;
					System.out.println("$$$$"+token+" is rare cause incre "+(probabilityOfWordForType * 0.5)+" probability");
				}else if(probabilityOfWordForType<avgOfProbability){
					// Common token: drop the types where it scores below its own average.
					System.out.println(token+" ignored in type["+typeName[type]+"]");
					continue;
				}

				System.out.println(token+" in type["+typeName[type]+"] tf:"+stat.freq()+",amount of word in this type:"+docsWithType.get(type)+",rate:"+stat.freq()/(double)docsWithType.get(type)+",belong this type probility:"+probabilityOfWordForType);
				Double soFar = probabilityMap.get(type);
				probabilityMap.put(type, soFar==null ? probabilityOfWordForType : soFar*probabilityOfWordForType);
			}
		}

		int guess = 0;
		// Scores are non-negative; 0 doubles as "nothing beat the baseline"
		// (was Double.MIN_VALUE, which is the smallest POSITIVE double, not a floor).
		double max = 0d;
		Map<Integer,Integer> possibleTypes = getPossibleTypes(tokens);
		// NOTE(review): heuristic — only commit to a guess when fewer than half
		// as many tokens were unknown as there are candidate types; confirm intent.
		if(unknowCount<possibleTypes.size()*0.5){
			for(Integer type : probabilityMap.keySet()){
	//			p4type = (double)possibleTypes.get(type)/getTypeSize();
				// NOTE(review): this expression reduces algebraically to ~1.0,
				// so the type prior is effectively disabled; kept as-is pending
				// a decision on which prior to use (see commented line above).
				double p4type = (double)docsWithType.get(type)/((double)docsWithType.get(type)/amountOfDoc*amountOfDoc);

				double p = probabilityMap.get(type) * p4type;
				System.out.println(typeName[type]+"["+type+":"+possibleTypes.get(type)+" freq]"+"=>"+p+",type probability=>"+p4type+",word probability=>"+probabilityMap.get(type));
				if(max<p){
					guess = type;
					max = p;
				}
			}
			for(String token : tokens){
				System.out.print(token+" ");
			}
			System.out.println();
		}
		return guess;
	}
	
	
	/**
	 * Loads the training corpus and builds the term-frequency table. Each
	 * non-empty line is expected to be "text SPACE comma-separated-type-ids";
	 * lines with no space are logged to stderr and skipped. Side effects:
	 * populates docsWithType and wordsWithType, and advances amountOfDoc /
	 * amountOfWord.
	 *
	 * @return token -> (type id -> WordStat) counters
	 * @throws IOException if the corpus file cannot be read
	 */
	private  Map<String,Map<Integer,WordStat>> loadTFCounter() throws IOException{
		Map<String,Map<Integer,WordStat>> tfCounter = new HashMap<String,Map<Integer,WordStat>>();
		BufferedReader br = new BufferedReader(new FileReader("/Users/zhengzb/Downloads/topic.txt"));
		try{
			String line;
			while((line=br.readLine())!=null){
				if(line.isEmpty()) continue;
				int lastIndexOf = line.lastIndexOf(" ");
				if(lastIndexOf==-1){
					System.err.println(line);
					continue;
				}
				String text = line.substring(0,lastIndexOf);
				String[] types = line.substring(lastIndexOf+1).split(",");
				List<String> tokens = getTokenForClassifyTraining(text);

				for(String type : types){
					amountOfDoc++;
					int t = Integer.parseInt(type); // parse once per label
					for(String token : tokens){
						amountOfWord++;
						Map<Integer,WordStat> counter = tfCounter.get(token);
						if(counter==null) counter = new HashMap<Integer,WordStat>();
						WordStat stat = counter.get(t);
						if(stat==null) stat = new WordStat(t);
						stat.increFreq();
						// Record co-occurrence of this label with every label on the line.
						for(String tp : types){
							stat.increTypeIntersect(Integer.parseInt(tp));
						}
						counter.put(t, stat);
						tfCounter.put(token, counter);

						// FIX: was wordsWithType.get(type) — a String key against a
						// Map<Integer,Integer>, which always returned null and froze
						// every per-type word count at 1, corrupting the probability
						// denominators used in classify().
						Integer count = wordsWithType.get(t);
						if(count==null) count = 0;
						wordsWithType.put(t, count+1);
					}
					Integer count = docsWithType.get(t);
					if(count==null) count = 0;
					docsWithType.put(t, count+1);
				}//end foreach types
			}
		}finally{
			// Release the file handle even when tokenization or parsing throws
			// (previously the reader leaked on any exception).
			br.close();
		}
		return tfCounter;
	}
	
	/**
	 * Tokenizes str with the IK analyzer and returns each distinct term whose
	 * part-of-speech tags include "v" (verb), "n" (noun) or "adj" (adjective),
	 * in first-seen order. Terms with no tags and duplicates are skipped.
	 *
	 * @param str raw text to tokenize
	 * @return filtered, de-duplicated tokens (possibly empty)
	 * @throws IOException if the token stream cannot be reset or closed
	 */
	private List<String> getTokenForClassifyTraining(String str) throws IOException {
		StringReader reader = new StringReader(str);
		// NOTE(review): the text itself is passed as the Lucene field name; IK
		// presumably ignores the field name, but a constant would be clearer — confirm.
		TokenStream ts = analyzer.tokenStream(str, reader);
		ts.reset();
		CharTermAttribute termAtt = ts.getAttribute(CharTermAttribute.class);
		NatureAttribute nature = ts.getAttribute(NatureAttribute.class);

		Set<String> bucket = new TreeSet<String>(); // terms already emitted
		List<String> tokens = new ArrayList<String>();
		try {
			Set<String> cache = new HashSet<String>(); // this term's POS tags
			StringBuilder nat = new StringBuilder();
			while (ts.incrementToken()){
				String key = termAtt.toString();
				nat.setLength(0);
				cache.clear();
				if(nature.natures()!=null){
					for(String s : nature.natures()){
						cache.add(s);
						nat.append(s).append(",");
					}
				}

				// Skip duplicates and terms that carry no part-of-speech tag.
				if(bucket.contains(key) || nat.length()==0) continue;

				if(cache.contains("v") || cache.contains("n") || cache.contains("adj")){
					tokens.add(key);
					bucket.add(key);
				}
			 }

		} catch (IOException e) {
			// Best effort: log and return whatever was collected so far.
			e.printStackTrace();
		} finally {
			// FIX: the token stream and reader were never closed (resource leak).
			ts.close();
			reader.close();
		}
		return tokens;
	}
	
	/**
	 * A (type id, score) pair ordered by score ascending.
	 * NOTE(review): appears unused within this file — candidate for removal.
	 */
	class Status implements Comparable<Status>{
		int type; // type id (index into typeName)
		double p; // probability/score for this type

		Status(){}

		Status(int type){
			this.type = type;
		}

		Status(int type,double p){
			this.type = type;
			this.p = p;
		}

		/**
		 * Orders by p ascending. Uses Double.compare so NaN and signed zero are
		 * handled consistently — the old ==/&gt; chain returned -1 for every NaN
		 * comparison in both directions, violating the compareTo contract.
		 */
		public int compareTo(Status target) {
			return Double.compare(p, target.p);
		}
	}
}
