package com.mijie.homi.search.service.topic;

import java.io.IOException;
import java.io.StringReader;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.BooleanClause.Occur;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.lucene.IKAnalyzer;
import org.wltea.analyzer.lucene.NatureAttribute;

import com.mijie.homi.search.domain.topic.Topic;

/**
 * Naive tf/idf based topic classifier.
 *
 * <p>Usage: call {@link #beginTraining()}, feed labelled topics through
 * {@link #addToTraining(Topic)}, then {@link #training()} to build the
 * tf/idf table; afterwards {@link #classify(String)} scores new content.
 *
 * <p>NOTE(review): this service holds mutable shared state and is NOT
 * thread-safe; training and classification must not run concurrently.
 */
@Service
public class ClassifyService {

	// Smart-mode IK analyzer used for Chinese word segmentation.
	private Analyzer analyzer = new IKAnalyzer(true);
	private Logger logger = Logger.getLogger(ClassifyService.class);

	// type -> set of keywords observed in that type's training documents
	private Map<String,Set<String>> keywordsOfEachType = new HashMap<String,Set<String>>();
	// keyword -> (type -> accumulated term count over all training documents)
	private Map<String,Map<String,Integer>> termCountMap = new HashMap<String,Map<String,Integer>>();
	// type -> number of training documents labelled with that type
	private Map<String,Integer> amountOfDocEachType = new HashMap<String,Integer>();
	// keyword -> (type -> tf*idf weight); built by training()
	private Map<String,Map<String,Float>> tfidfMap = new HashMap<String,Map<String,Float>>();

	// index -> human-readable type name; index 0 is an unused placeholder so
	// numeric type ids map directly onto array positions.
	private static final String[] typeName = {"","恋爱男女","婚姻家庭","夫妻关系","婆媳关系","长辈代沟","晚辈代沟","职场烦恼","学习烦恼","朋友关系","事业前途","自身烦恼","社会现象"};

	/**
	 * Returns true for the parts of speech used in classification:
	 * nouns ("n") and verbs ("v").
	 */
	private boolean acceptNature(String nature) {
		return nature.equalsIgnoreCase("n") || nature.equalsIgnoreCase("v");
	}

	/** Resets all accumulated training state before a new training run. */
	public void beginTraining() {
		keywordsOfEachType.clear();
		termCountMap.clear();
		amountOfDocEachType.clear();
	}

	/**
	 * Adds one labelled topic to the training statistics. The topic's type
	 * field is a comma-separated list of numeric type ids; its title is
	 * segmented into keywords which are counted per type.
	 *
	 * @param topic training document (title = text, type = labels)
	 */
	public void addToTraining(Topic topic) {
		String[] typeEntry = topic.getType().split(",");
		Map<String,Integer> termCountCounter = countTermCount(topic.getTitle());
		// BUGFIX: the per-type document counter must be bumped exactly once
		// per (document, type); previously it was incremented inside the
		// keyword loop, counting keyword occurrences instead of documents.
		for (String type : typeEntry) {
			if (type.isEmpty()) continue;
			Integer amountOfDoc = amountOfDocEachType.get(type);
			amountOfDocEachType.put(type, amountOfDoc == null ? 1 : amountOfDoc + 1);
		}
		for (String keyword : termCountCounter.keySet()) {
			Map<String,Integer> tcMap = termCountMap.get(keyword);
			if (tcMap == null) tcMap = new HashMap<String,Integer>();
			for (String type : typeEntry) {
				if (type.isEmpty()) continue;
				Integer tc = tcMap.get(type);
				if (tc == null) tc = 0;
				tcMap.put(type, tc + termCountCounter.get(keyword));
				Set<String> keywordsOfType = keywordsOfEachType.get(type);
				if (keywordsOfType == null) keywordsOfType = new HashSet<String>();
				keywordsOfType.add(keyword);
				keywordsOfEachType.put(type, keywordsOfType);
			}
			termCountMap.put(keyword, tcMap);
		}
	}

	/** @return number of distinct keywords seen so far during training */
	public int getKeywordSize() {
		return termCountMap.size();
	}

	/**
	 * Segments {@code str} and counts occurrences of accepted terms
	 * (length >= 2, nature "n" or "v").
	 *
	 * <p>NOTE(review): a term carrying several accepted natures is counted
	 * once per accepted nature (unlike {@link #getKeywords(String)}, which
	 * breaks after the first) — confirm this asymmetry is intended.
	 *
	 * @param str text to segment
	 * @return map of term -> occurrence count (never null)
	 */
	public Map<String,Integer> countTermCount(String str) {
		Map<String,Integer> termCountCounter = new HashMap<String,Integer>();
		StringReader reader = new StringReader(str);
		TokenStream ts = null;
		try {
			// NOTE(review): the first argument of tokenStream() is a field
			// name; passing the content string is odd but preserved.
			ts = analyzer.tokenStream(str, reader);
			CharTermAttribute termAtt = ts.getAttribute(CharTermAttribute.class);
			NatureAttribute natureAtr = ts.getAttribute(NatureAttribute.class);
			ts.reset();
			while (ts.incrementToken()) {
				if (termAtt.length() < 2) continue;
				String[] natures = natureAtr.natures();
				if (natures == null) continue;
				for (String nature : natures) {
					if (acceptNature(nature)) {
						String term = termAtt.toString();
						Integer count = termCountCounter.get(term);
						termCountCounter.put(term, count == null ? 1 : count + 1);
					}
				}
			}
		} catch (IOException e) {
			logger.error(e.getMessage(), e);
		} finally {
			// BUGFIX: close was skipped when incrementToken() threw.
			if (ts != null) {
				try {
					ts.close();
				} catch (IOException e) {
					logger.error(e.getMessage(), e);
				}
			}
		}
		return termCountCounter;
	}

	/** Builds the tf/idf table from the accumulated training statistics. */
	public void training() throws IOException {
		countTFIDF();
	}

	/**
	 * Computes a tf*idf weight for every (keyword, type) pair and stores it
	 * in {@link #tfidfMap}.
	 */
	private void countTFIDF() throws IOException {
		Set<String> keywords = termCountMap.keySet();
		for (String keyword : keywords) {
			Map<String,Integer> termCountInEachType = termCountMap.get(keyword);
			Set<String> typeSet = termCountInEachType.keySet();
			Map<String,Float> tfidfInEachType = new HashMap<String,Float>();
			for (String type : typeSet) {
				// total count of this keyword in every OTHER type
				int tcExceptCurrentType = 0;
				for (String t : typeSet) {
					if (!t.equals(type)) {
						tcExceptCurrentType += termCountInEachType.get(t);
					}
				}
				// fraction of all known keywords that appear in this type
				float rate = (float) keywordsOfEachType.get(type).size() / keywords.size();
				float tf;
				if (tcExceptCurrentType > 0)
					tf = termCountInEachType.get(type) / rate / tcExceptCurrentType;
				else
					tf = termCountInEachType.get(type) / rate;
				// BUGFIX: parenthesised (typeName.length - 1). The old
				// expression parsed as length - (1/df) due to operator
				// precedence, making idf nearly constant. Intended form is
				// idf = log(numTypes / numTypesContainingKeyword).
				float idf = (float) Math.log((typeName.length - 1) / (float) typeSet.size());
				tfidfInEachType.put(type, tf * idf);
			}
			tfidfMap.put(keyword, tfidfInEachType);
		}
	}

	/**
	 * @param type numeric type id (1-based; 0 is the empty placeholder)
	 * @return display name for the type id
	 */
	public String getTypeName(int type) {
		return typeName[type];
	}

	/**
	 * Segments {@code str} and returns the distinct accepted keywords
	 * (length > 1, nature "n" or "v").
	 *
	 * @param str text to segment
	 * @return distinct keyword set (never null)
	 * @throws IOException if the token stream cannot be reset or closed
	 */
	public Set<String> getKeywords(String str) throws IOException {
		StringReader reader = new StringReader(str);
		TokenStream ts = analyzer.tokenStream(str, reader);
		Set<String> tokenSet = new HashSet<String>();
		try {
			CharTermAttribute termAtt = ts.getAttribute(CharTermAttribute.class);
			NatureAttribute natureAtr = ts.getAttribute(NatureAttribute.class);
			// reset() failures propagate to the caller, as before
			ts.reset();
			try {
				while (ts.incrementToken()) {
					String token = termAtt.toString();
					String[] natures = natureAtr.natures();
					if (natures == null || token.length() <= 1) {
						continue;
					}
					for (String nature : natures) {
						if (acceptNature(nature)) {
							tokenSet.add(token);
							break;
						}
					}
				}
			} catch (IOException e) {
				// BUGFIX: was e.printStackTrace(); route through the logger.
				logger.error(e.getMessage(), e);
			}
		} finally {
			// BUGFIX: the stream was never closed before (resource leak).
			ts.close();
		}
		return tokenSet;
	}

	/**
	 * Classifies {@code content} against the trained tf/idf table.
	 *
	 * <p>Fails (returns null) when 40% or more of the extracted keywords
	 * were never seen in training, or when no type's top score reaches the
	 * average score.
	 *
	 * @param content text to classify
	 * @return the winning numeric type id as a string, or null on failure
	 */
	public String classify(String content) throws Exception {
		Set<String> tokenSet = getKeywords(content);
		// BUGFIX: guard against 0/0 (NaN) when no keywords were extracted;
		// the original fell through NaN comparisons to the same null result.
		if (tokenSet.isEmpty()) {
			logger.info("classify fail");
			return null;
		}
		Map<String,Float> scoreMap = new HashMap<String,Float>();
		int[] typeAppearTimes = new int[typeName.length];
		int firstMeet = 0;
		for (String token : tokenSet) {
			Map<String,Float> tfidfMapEachType = tfidfMap.get(token);
			if (tfidfMapEachType == null) {
				// keyword never seen during training
				firstMeet++;
				logger.info(token + " is first meet.");
				continue;
			}
			for (String type : tfidfMapEachType.keySet()) {
				typeAppearTimes[Integer.parseInt(type)]++;
				Float score = scoreMap.get(type);
				if (score == null) score = 0f;
				float tfidf = tfidfMapEachType.get(type);
				// ignore weak signals below the fixed threshold
				if (tfidf < 5) continue;
				scoreMap.put(type, tfidf + score);
				logger.info(token + " tfidf in type[" + getTypeName(Integer.parseInt(type)) + "]:" + tfidf);
			}
		}
		String hitType = null;
		float firstMeetRate = (float) firstMeet / tokenSet.size() * 100;
		logger.info("=============summary[firstMeet:" + firstMeet + ",tokenCount:" + tokenSet.size() + ",rate:" + firstMeetRate + "%]============");
		if (firstMeetRate >= 40) {
			logger.info("classify fail");
		} else {
			float hightScore = 0f;
			float scoreSum = 0f;
			for (String type : scoreMap.keySet()) {
				float s = scoreMap.get(type);
				if (s > hightScore) {
					hightScore = s;
					hitType = type;
				}
				scoreSum += s;
				// BUGFIX: was System.out.println in a service class.
				logger.info(getTypeName(Integer.parseInt(type)) + " score:" + s + ",appear times:" + typeAppearTimes[Integer.parseInt(type)]);
			}
			// Explicit empty check avoids a NaN average (was scoreSum/0);
			// the original's NaN comparison produced the same fail result.
			if (!scoreMap.isEmpty() && hightScore >= scoreSum / scoreMap.size()) {
				logger.info("=======================hit[score avg:" + scoreSum / scoreMap.size() + "]=============================");
				logger.info(getTypeName(Integer.parseInt(hitType)) + "[" + hitType + "] score:" + scoreMap.get(hitType) + ",appear times:" + typeAppearTimes[Integer.parseInt(hitType)]);
			} else {
				logger.info("classify fail");
			}
		}
		return hitType;
	}
}
