/**
 * @author phoenics@126.com
 * @date 2017年11月7日 下午2:40:36
 * @version V1.0
 */

package com.jx.gocom.nlp.classify.webservice.service.impl;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.stereotype.Service;

import com.google.common.util.concurrent.AtomicDouble;
import com.hankcs.hanlp.HanLP;
//import com.hankcs.hanlp.dictionary.stopword.CoreStopWordDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.jx.gocom.nlp.classify.webservice.service.ClassifyService;

/**
 * Naive-Bayes text classifier backed by two SQLite tables — BASE (raw training
 * documents keyed by MD5) and CLASSIFY (per-class keyword frequencies) — using
 * HanLP for word segmentation and part-of-speech filtering.
 */
//@Service
public class Bayes implements ClassifyService {
	private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Bayes.class);
	// Multiplier applied to every per-keyword weight so the running product in
	// calcProd() stays away from float underflow.
	private static final double zoomFactor = 10.0;
	// Total number of training documents (rows in BASE).
	private final AtomicLong actCount = new AtomicLong(0L);
	// Smallest FREQUENCY stored in CLASSIFY; a tenth of it is used as the
	// smoothing floor for words never seen in a given class.
	private final AtomicDouble minF = new AtomicDouble(0.0);
	// Training-document count per class name.
	private final Map<String, Long> classP = new ConcurrentHashMap<>();
	@Autowired
	JdbcTemplate jdbcTemplate;
	// HanLP segmenter with part-of-speech tagging enabled (needed by filtWord).
	Segment seg;

	/**
	 * Creates the backing tables, caches the minimum stored frequency and
	 * builds the segmenter. @PostConstruct is commented out as in the
	 * original; call explicitly when the bean is re-enabled.
	 */
	//@PostConstruct
	public void init() {
		creatBaseTable();
		creatClassifyTable();
		idfLimit();
		this.seg = HanLP.newSegment().enablePartOfSpeechTagging(true);
	}

	/** Creates the BASE table (raw training documents) plus its MD5 index, if absent. */
	public void creatBaseTable() {
		String creatsql = "CREATE TABLE IF NOT EXISTS BASE(" + "   ID INTEGER PRIMARY KEY     AUTOINCREMENT,"
				+ "MD5 VARCHAR(32) NOT NULL,"
				+ "   NAME          VARCHAR(128)    NOT NULL," + "   CONTENT           TEXT    NOT NULL);";
		String indexsql = "CREATE INDEX IF NOT EXISTS MD5_IDX ON BASE(Md5)";
		jdbcTemplate.execute(creatsql);
		jdbcTemplate.execute(indexsql);
	}

	/** Creates the CLASSIFY table (per-class keyword weights) plus its indexes, if absent. */
	public void creatClassifyTable() {
		String creatsql = "CREATE TABLE IF NOT EXISTS CLASSIFY(" + "   ID INTEGER PRIMARY KEY     AUTOINCREMENT,"
				+ "   NAME          VARCHAR(128)    NOT NULL," + "   KEYWORD          VARCHAR(56)    NOT NULL,"
				+ "    FREQUENCY            FLOAT    NOT NULL);";
		String indexsql = "CREATE INDEX IF NOT EXISTS NK_INDX ON CLASSIFY(NAME,KEYWORD);";
		String indexupdatesql = "CREATE INDEX IF NOT EXISTS NK_UP_INDX ON CLASSIFY(KEYWORD);";
		jdbcTemplate.execute(creatsql);
		jdbcTemplate.execute(indexsql);
		jdbcTemplate.execute(indexupdatesql);
	}

	/** Removes every row from CLASSIFY (the trained model); BASE is untouched. */
	public void deleteClassifyTable() {
		jdbcTemplate.execute("DELETE FROM CLASSIFY;");
	}

	/** Refreshes the cached minimum frequency (min() yields null when CLASSIFY is empty). */
	public void idfLimit() {
		Double minF_t = jdbcTemplate.queryForObject("SELECT min(FREQUENCY) FROM CLASSIFY;", Double.class);
		if (minF_t != null) {
			minF.set(minF_t);
		}
	}

	/**
	 * Stores one training document.
	 * Uses bind parameters: the original concatenated raw content into the
	 * SQL, which broke on any quote character and allowed SQL injection.
	 */
	@Override
	public void appendData(String fileClassify, String md5, String content) {
		jdbcTemplate.update("INSERT INTO BASE(NAME,MD5,CONTENT) VALUES (?,?,?)", fileClassify, md5, content);
	}

	/**
	 * Removes the training document(s) with the given MD5.
	 * BUG FIX: the original embedded "+md5+" inside the SQL string literal, so
	 * it compared MD5 against the literal text '+md5+' and never deleted anything.
	 */
	@Override
	public void backData(String md5) {
		jdbcTemplate.update("DELETE FROM BASE WHERE MD5=?", md5);
	}

	/** Drops all training data and the trained model, and resets the in-memory counters. */
	@Override
	public void deleteAllData() {
		jdbcTemplate.execute("DELETE FROM BASE ;");
		deleteClassifyTable();
		actCount.set(0L);
		classP.clear();
	}

	/**
	 * Keeps only content-bearing terms: rejects the listed HanLP part-of-speech
	 * classes (numerals 'm', conjunctions 'c', prepositions 'p', pronouns 'r',
	 * punctuation 'w', ...) and anything of trimmed length <= 1.
	 */
	private boolean filtWord(Term term) {
		if (term.nature == null) return false;
		switch (term.nature.toString().charAt(0)) {
			case 'm':
			case 'b':
			case 'c':
			case 'e':
			case 'o':
			case 'p':
			case 'q':
			case 'u':
			case 'y':
			case 'z':
			case 'r':
			case 'w':
				return false;
			default:
				// Single-character tokens carry too little signal to keep.
				return term.word.trim().length() > 1;
		}
	}

	/**
	 * Computes document frequencies of the keywords of class {@code name},
	 * accumulates them into {@code idf_map} and stores each keyword's per-class
	 * relative frequency (docFreq / doccount) into CLASSIFY.
	 */
	private void classGroup(Map<String, Long> idf_map, String name, long doccount) {
		SqlRowSet rs = jdbcTemplate.queryForRowSet("SELECT CONTENT  FROM BASE WHERE NAME=?", name);
		Map<String, Long> docFreq = new LinkedHashMap<>();
		while (rs.next()) {
			List<Term> terms = seg.seg(rs.getString("CONTENT"));
			// Each document contributes at most 1 per keyword (distinct()).
			terms.stream().filter(this::filtWord).map(t -> t.word).distinct()
					.filter(w -> w.trim().length() > 0)
					.forEach(w -> docFreq.merge(w, 1L, Long::sum));
		}
		for (Map.Entry<String, Long> e : docFreq.entrySet()) {
			if (idf_map != null) {
				idf_map.merge(e.getKey(), e.getValue(), Long::sum);
			}
			float ret = (float) e.getValue().longValue() / (float) doccount;
			jdbcTemplate.update("INSERT INTO CLASSIFY (NAME,KEYWORD,FREQUENCY) VALUES(?,?,?)", name, e.getKey(), ret);
		}
	}

	/**
	 * Rebuilds the CLASSIFY model from BASE: per-class term frequencies are
	 * recomputed, then every keyword's frequency is scaled by a smoothed IDF,
	 * log(totalDocs / (docFreq + 1)).
	 */
	@Override
	public void reTrainData() {
		resetCount();
		deleteClassifyTable();
		Map<String, Long> idf_map = new LinkedHashMap<>();
		for (Map.Entry<String, Long> e : classP.entrySet()) {
			classGroup(idf_map, e.getKey(), e.getValue());
		}
		long allcount = actCount.get();
		for (Map.Entry<String, Long> e : idf_map.entrySet()) {
			double idf = Math.log((double) allcount / (double) (e.getValue() + 1L));
			logger.info("UPDATE CLASSIFY SET FREQUENCY = FREQUENCY * {} WHERE KEYWORD='{}';", idf, e.getKey());
			jdbcTemplate.update("UPDATE CLASSIFY SET FREQUENCY = FREQUENCY * ? WHERE KEYWORD=?", idf, e.getKey());
		}
		idfLimit();
	}

	/** Returns the known class names. */
	@Override
	public Set<String> allClass() {
		return classP.keySet();
	}

	/** Reloads actCount, minF and the per-class document counts from BASE. */
	private void resetCount() {
		classP.clear();
		Integer actCountAll = jdbcTemplate.queryForObject("SELECT count(*)  FROM BASE;", Integer.class);
		if (actCountAll == null || actCountAll.intValue() == 0) {
			return;
		}
		actCount.set(actCountAll.longValue());
		idfLimit();
		List<Map<String, Object>> resList =
				jdbcTemplate.queryForList("SELECT NAME, count(*) AS C  FROM BASE GROUP BY NAME;");
		for (Map<String, Object> map : resList) {
			String name = (String) map.get("NAME");
			Integer c = (Integer) map.get("C");
			classP.put(name, c.longValue());
		}
	}

	/**
	 * Looks up the stored weight of {@code keyword} for class {@code Cj};
	 * returns {@code limit} (the smoothing floor) when the keyword is unknown.
	 */
	private float calculatePxc(String keyword, String Cj, float limit) {
		List<Map<String, Object>> resList = jdbcTemplate.queryForList(
				"SELECT * FROM CLASSIFY WHERE NAME=? AND KEYWORD=?", Cj, keyword);
		if (resList != null && !resList.isEmpty()) {
			// The SQLite driver reports the FLOAT column as a Double.
			Double d = (Double) resList.get(0).get("FREQUENCY");
			return d.floatValue();
		}
		return limit;
	}

	/**
	 * Naive-Bayes score of keyword list {@code X} for class {@code Cj}: the
	 * product of the (zoomed) per-keyword weights times the class prior.
	 * Returns 0 when too few keywords of X were actually found in the class
	 * vocabulary ({@code s} counts smoothing-floor fallbacks).
	 */
	private float calcProd(List<String> X, String Cj) {
		float ret = 1.0F;
		long allcount = actCount.get();
		Long docCountL = classP.get(Cj);
		if (docCountL == null) {
			return 0.0f;
		}
		long docCount = docCountL.longValue();
		// Smoothing floor: one tenth of the smallest stored frequency.
		float ret_t = (float) minF.get() / 10.0f;
		int s = 0;
		for (String word : X) {
			float f = calculatePxc(word, Cj, ret_t);
			if (f == ret_t) {
				s++;
				logger.info("[#Evaluate#] keyword={},weight={},###=false", word, f * zoomFactor);
			} else {
				logger.info("[#Evaluate#] keyword={},weight={}", word, f * zoomFactor);
			}
			ret *= f * zoomFactor;
		}
		if (allcount == docCount) {
			// Single-class corpus: require a stronger score before accepting.
			if (X.size() - s < 3 && ret < 3.0) {
				return 0.0f;
			}
		} else if (X.size() - s < 1) {
			// No keyword of X is known for this class at all.
			return 0.0f;
		}
		logger.info("[#Evaluate#] prior ={}", (float) docCount / (float) allcount);
		ret *= (float) docCount / (float) allcount;
		return ret;
	}

	// Orders ClassifyResult descending by probability; equal scores compare as
	// equal and therefore collapse to a single entry in the TreeMap.
	private int compare(ClassifyResult p1, ClassifyResult p2) {
		if (p1.probility > p2.probility) {
			return -1;
		} else if (p1.probility < p2.probility) {
			return 1;
		}
		return 0;
	}

	/** Returns the best-scoring class for {@code text}, or "" when nothing matches. */
	@Override
	public String classify(String text) {
		TreeMap<ClassifyResult, String> weightMap = classifies(text);
		if (weightMap.size() < 1) {
			return "";
		}
		return weightMap.firstKey().classification;
	}

	/**
	 * Scores {@code text} against every known class and returns the results
	 * ordered best-first; classes scoring zero are omitted.
	 */
	@Override
	public TreeMap<ClassifyResult, String> classifies(String text) {
		TreeMap<ClassifyResult, String> weightMap = new TreeMap<>(this::compare);
		// Lazily (re)load counters, e.g. on first call after a restart.
		if (actCount.get() == 0 || classP.size() == 0) {
			resetCount();
		}
		List<Term> terms = seg.seg(text);
		List<String> terms_dis = terms.stream().filter(this::filtWord).map(t -> t.word).distinct()
				.collect(Collectors.toList());
		logger.info("[#Evaluate#] [start] ... classify text={}", text);
		for (String className : classP.keySet()) {
			logger.info("[#Evaluate#]  classify className={}", className);
			ClassifyResult cr = new ClassifyResult();
			cr.classification = className;
			cr.probility = calcProd(terms_dis, className);
			logger.info("[#Evaluate#] probility ={}", cr.probility);
			if (cr.probility > 0.0f) {
				weightMap.put(cr, "");
			} else {
				logger.info("[#Evaluate#] {} is Empty!!!", className);
			}
		}
		logger.info("[#Evaluate#] [end] ... classify text={}", text);
		return weightMap;
	}

}
