/**
 * @author phoenics@126com
 * @date 2017年12月12日 下午3:36:51
 * @version V1.0
 */

package com.jx.gocom.nlp.classify.webservice.service.impl;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.stereotype.Service;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim;

/**
 * Naive-Bayes text classifier backed by SQLite tables (BASE, METASOURCE,
 * METABASE, CLASSIFY) and a HanLP segmenter. Raw training documents are
 * accumulated in BASE; {@link #retrainningData()} derives per-class keyword
 * frequencies and IDF weights from them.
 */
@Service
public class BayesDim implements ClassifyServiceDim {
	private static org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BayesDim.class);
	// Multiplier applied to every per-keyword weight in calcProd();
	// presumably to keep the product of many small probabilities from
	// underflowing to 0 — TODO confirm the intended calibration.
	private static double zoomFactor = 10.0f;
	// Sentinel values stored when a record has no real position/tag.
	public static final String NULLPOSITION = "<#NULL_POSITION#>";
	public static final String NULLTAG = "<#NULL_TAG#>";
	// Total document count per datasource (was: 所有文本数 按照不同的数据源).
	private Map<String, Long> actCount = new ConcurrentHashMap<>();
	// Minimum stored keyword weight per datasource; used in calcProd() as the
	// fallback weight for keywords unseen in a class (was: 最小的词权重 按照不同的数据源).
	private Map<String, Double> minF = new ConcurrentHashMap<>();
	// Document count per class dimension datasource/position/tag/name
	// (was: 类别对应的txt文本数).
	private Map<DimData, Long> classP = new ConcurrentHashMap<>();
	@Autowired
	JdbcTemplate jdbcTemplate;
	// HanLP segmenter with part-of-speech tagging enabled; assigned in init().
	Segment seg;
	/**
	 * Post-construction initialization: creates the SQLite schema (idempotent
	 * CREATE TABLE IF NOT EXISTS statements), loads the cached counts and
	 * minimum weights from the meta tables, and builds the HanLP segmenter.
	 * Table creation must run before idfLimitAndAllcount(), which queries
	 * METASOURCE and METABASE.
	 */
	@PostConstruct
	public void init() {
		creatBaseTable();
		creatSourceTable();
		creatMetaBaseTable();
		creatClassifyTable();
		idfLimitAndAllcount();
		this.seg = HanLP.newSegment().enablePartOfSpeechTagging(true);
	}

	/**
	 * Creates the BASE table holding the raw training corpus: one row per
	 * document with its MD5, class dimension (DATASOURCE/POSITION/TAG/NAME)
	 * and full text CONTENT, plus an index on MD5 for backData() lookups.
	 * Idempotent (IF NOT EXISTS).
	 */
	public void creatBaseTable() {
		String creatsql = "CREATE TABLE IF NOT EXISTS BASE(" 
				+ "   ID INTEGER PRIMARY KEY     AUTOINCREMENT,"
				+ "MD5 VARCHAR(32) NOT NULL," 
				+ "DATASOURCE VARCHAR(32) NOT NULL," 
				+ "POSITION VARCHAR(32) NOT NULL,"
				+ "TAG VARCHAR(32)  ," 
				+ "   NAME          VARCHAR(128)    NOT NULL,"
				+ "   CONTENT           TEXT    NOT NULL);";
		String indexsql = "CREATE INDEX IF NOT EXISTS MD5_IDX ON BASE(Md5)";
		jdbcTemplate.execute(creatsql);
		jdbcTemplate.execute(indexsql);
	}

	/**
	 * Creates the METASOURCE table: one row per datasource with its minimum
	 * keyword frequency (MINFREQUENCY) and total document count (SOURCECOUNT).
	 * Idempotent (IF NOT EXISTS).
	 *
	 * Fix: the count column type was misspelled "INTERGER"; SQLite accepts any
	 * type name silently, but the typo resolves to NUMERIC affinity instead of
	 * the intended INTEGER affinity.
	 */
	public void creatSourceTable() {
		String creatsql = "CREATE TABLE IF NOT EXISTS METASOURCE(" + "   ID INTEGER PRIMARY KEY     AUTOINCREMENT,"
				+ "DATASOURCE VARCHAR(32) NOT NULL," + "MINFREQUENCY            FLOAT    NOT NULL,"
				+ "   SOURCECOUNT            INTEGER    NOT NULL);";
		jdbcTemplate.execute(creatsql);
	}

	/**
	 * Creates the METABASE table: one row per class dimension
	 * (DATASOURCE/POSITION/TAG/NAME) with its document count (CLASSCOUNT),
	 * plus a composite lookup index. Idempotent (IF NOT EXISTS).
	 *
	 * Fix: the count column type was misspelled "INTERGER"; SQLite accepts any
	 * type name silently, but the typo resolves to NUMERIC affinity instead of
	 * the intended INTEGER affinity.
	 */
	public void creatMetaBaseTable() {
		String creatsql = "CREATE TABLE IF NOT EXISTS METABASE(" + "   ID INTEGER PRIMARY KEY     AUTOINCREMENT,"
				+ "DATASOURCE VARCHAR(32) NOT NULL," + "POSITION VARCHAR(32) NOT NULL," + "TAG VARCHAR(32)  ,"
				+ "   NAME          VARCHAR(128)    NOT NULL," + "   CLASSCOUNT            INTEGER    NOT NULL);";
		String indexsql = "CREATE INDEX IF NOT EXISTS META_IDX ON METABASE(DATASOURCE,POSITION,TAG,NAME)";
		jdbcTemplate.execute(creatsql);
		jdbcTemplate.execute(indexsql);
	}

	/**
	 * Creates the CLASSIFY table holding the trained model: one row per
	 * (class dimension, keyword) pair with the keyword's document count
	 * (WORDCOUNT) and its IDF-weighted frequency (FREQUENCY), plus the two
	 * indexes used by lookups and IDF updates. Idempotent (IF NOT EXISTS).
	 *
	 * Fix: the count column type was misspelled "INTERGER"; SQLite accepts any
	 * type name silently, but the typo resolves to NUMERIC affinity instead of
	 * the intended INTEGER affinity.
	 */
	public void creatClassifyTable() {
		String creatsql = "CREATE TABLE IF NOT EXISTS CLASSIFY(" + "   ID INTEGER PRIMARY KEY     AUTOINCREMENT,"
				+ "DATASOURCE VARCHAR(32) NOT NULL," + "POSITION VARCHAR(32) NOT NULL," + "TAG VARCHAR(32)  ,"
				+ "   NAME          VARCHAR(128)    NOT NULL," + "   KEYWORD          VARCHAR(56)    NOT NULL,"
				+ "    WORDCOUNT            INTEGER    NOT NULL," + "    FREQUENCY            FLOAT    NOT NULL);";
		String indexsql = "CREATE INDEX IF NOT EXISTS NK_INDX ON CLASSIFY(DATASOURCE,POSITION,TAG,NAME,KEYWORD);";
		String indexupdatesql = "CREATE INDEX IF NOT EXISTS NK_UP_INDX ON CLASSIFY(DATASOURCE,KEYWORD);";
		jdbcTemplate.execute(creatsql);
		jdbcTemplate.execute(indexsql);
		jdbcTemplate.execute(indexupdatesql);
	}

	/**
	 * Empties every derived model table — METASOURCE, METABASE and CLASSIFY.
	 * The raw training corpus in BASE is left untouched.
	 */
	public void deleteClassifyTable() {
		jdbcTemplate.execute("DELETE FROM METASOURCE;");
		jdbcTemplate.execute("DELETE FROM METABASE;");
		jdbcTemplate.execute("DELETE FROM CLASSIFY;");
	}

	/**
	 * Reloads the in-memory caches from the persisted meta tables: per-source
	 * document totals (actCount) and minimum keyword frequencies (minF) from
	 * METASOURCE, then per-class document counts via resetClassCount().
	 */
	public void idfLimitAndAllcount() {
		minF.clear();
		actCount.clear();
		String mixsql = "SELECT * FROM METASOURCE;";
		List<Map<String, Object>> resList = jdbcTemplate.queryForList(mixsql);
		for (Map<String, Object> map : resList) {
			String name = (String) map.get("DATASOURCE");
			// Cast through Number: the SQLite driver may surface numeric
			// columns as Integer, Long or Double depending on the stored
			// value, so a direct (Double)/(Integer) cast can throw.
			Number minFrequency = (Number) map.get("MINFREQUENCY");
			Number count = (Number) map.get("SOURCECOUNT");
			actCount.put(name, count.longValue());
			minF.put(name, minFrequency.doubleValue());
		}
		resetClassCount();
	}

	/**
	 * Reloads the per-class document counts (classP) from METABASE, keyed by
	 * the full class dimension (datasource, position, tag, name).
	 */
	private void resetClassCount() {
		classP.clear();
		String countSql = "SELECT DATASOURCE,POSITION,TAG,NAME,CLASSCOUNT  FROM METABASE;";
		List<Map<String, Object>> resList = jdbcTemplate.queryForList(countSql);
		for (Map<String, Object> map : resList) {
			String s = (String) map.get("DATASOURCE");
			String p = (String) map.get("POSITION");
			String t = (String) map.get("TAG");
			String n = (String) map.get("NAME");
			// Cast through Number: the SQLite driver may return Integer or
			// Long for INTEGER columns depending on the stored magnitude.
			Number c = (Number) map.get("CLASSCOUNT");
			classP.put(new DimData(s, p, t, n), c.longValue());
		}
	}

	/**
	 * Inserts one raw training document into BASE.
	 *
	 * Fix: the original built the INSERT by concatenating the raw values into
	 * the SQL string, which broke on any value containing a single quote and
	 * was open to SQL injection (CONTENT is arbitrary user text). Rewritten as
	 * a parameterized statement with identical effect for well-formed input.
	 *
	 * @param dataSource datasource the document belongs to
	 * @param position   position dimension (or NULLPOSITION sentinel)
	 * @param tag        tag dimension (or NULLTAG sentinel)
	 * @param name       class label
	 * @param md5        document fingerprint, used by backData() to remove it
	 * @param content    full document text
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#appendData(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
	 */
	@Override
	public void appendData(String dataSource, String position, String tag, String name, String md5, String content) {
		String sql = "INSERT INTO BASE(DATASOURCE,POSITION,TAG,NAME,MD5,CONTENT) VALUES (?,?,?,?,?,?);";
		jdbcTemplate.update(sql, dataSource, position, tag, name, md5, content);
	}

	/**
	 * Convenience overload: unpacks the class dimension from a DimData and
	 * delegates to the six-argument appendData().
	 *
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#appendData(com.jx.gocom.nlp.classify.webservice.service.impl.DimData, java.lang.String, java.lang.String)
	 */
	@Override
	public void appendData(DimData dimData, String md5, String content) {
		appendData(dimData.getDataSource(), dimData.getPosition(), dimData.getTag(), dimData.getName(), md5, content);
	}

	/**
	 * Removes the document with the given MD5 from the BASE corpus.
	 *
	 * Bug fix: the original statement was the literal
	 * "DELETE FROM BASE WHERE MD5='+md5+';" — the parameter sat INSIDE the
	 * string, so the query matched the text "+md5+" and never deleted the
	 * intended row. Rewritten as a parameterized delete.
	 *
	 * @param md5 fingerprint of the document to remove
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#backData(java.lang.String)
	 */
	@Override
	public void backData(String md5) {
		jdbcTemplate.update("DELETE FROM BASE WHERE MD5=?;", md5);
	}

	/**
	 * Drops everything: the raw corpus in BASE, all derived model tables, and
	 * the in-memory caches.
	 *
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#deleteAllData()
	 */
	@Override
	public void deleteAllData() {
		jdbcTemplate.execute("DELETE FROM BASE ;");
		deleteClassifyTable();
		minF.clear();
		actCount.clear();
		classP.clear();
	}

	/**
	 * Keyword filter applied to segmented terms. A term is kept only when it
	 * has a part-of-speech tag, the tag's first letter is not one of the
	 * excluded categories (numerals, conjunctions, prepositions, particles,
	 * pronouns, punctuation, ...), and the trimmed word is longer than one
	 * character.
	 *
	 * @param term a HanLP segmentation result
	 * @return true if the term should be used as a classification keyword
	 */
	private boolean filtWord(Term term) {
		if (term.nature == null) {
			return false;
		}
		// Drop stop-categories by the first letter of the POS tag; this set
		// matches the original switch cases m,b,c,e,o,p,q,u,y,z,r,w.
		char posInitial = term.nature.toString().charAt(0);
		if ("mbceopquyzrw".indexOf(posInitial) >= 0) {
			return false;
		}
		// Single characters carry too little signal to be keywords.
		return term.word.trim().length() > 1;
	}

	/**
	 * Computes the keyword statistics for one class: counts, for every kept
	 * keyword, in how many of the class's documents it appears (distinct per
	 * document), then inserts one CLASSIFY row per keyword with that count and
	 * its relative document frequency count/doccount.
	 *
	 * Fixes: the SELECT is now parameterized (the dimension values were
	 * concatenated into the SQL), and the deprecated new Long(...) counting
	 * loop is replaced by Map.merge.
	 *
	 * @param dimData  class dimension being trained
	 * @param doccount number of documents in this class (denominator)
	 */
	private void classGroup(DimData dimData, long doccount) {
		String selectSql = "SELECT CONTENT FROM BASE WHERE DATASOURCE=? AND POSITION=? AND TAG=? AND NAME=?;";
		SqlRowSet rs = jdbcTemplate.queryForRowSet(selectSql, dimData.getDataSource(), dimData.getPosition(),
				dimData.getTag(), dimData.getName());
		// Keyword -> number of documents of this class containing it.
		Map<String, Long> docFreq = new LinkedHashMap<>();
		while (rs.next()) {
			String content = rs.getString("CONTENT");
			List<Term> terms = seg.seg(content);
			// distinct() ensures each document contributes at most 1 per word.
			List<String> keptWords = terms.stream().filter(this::filtWord).map(t -> t.word).distinct()
					.filter(w -> w.trim().length() > 0).collect(Collectors.toList());
			for (String w : keptWords) {
				docFreq.merge(w, 1L, Long::sum);
			}
		}
		String insertSql = "INSERT INTO CLASSIFY (DATASOURCE,POSITION,TAG,NAME,KEYWORD,WORDCOUNT,FREQUENCY) VALUES(?,?,?,?,?,?,?);";
		for (Map.Entry<String, Long> entry : docFreq.entrySet()) {
			float frequency = entry.getValue().floatValue() / (float) doccount;
			jdbcTemplate.update(insertSql, dimData.getDataSource(), dimData.getPosition(), dimData.getTag(),
					dimData.getName(), entry.getKey(), entry.getValue(), frequency);
		}
	}

	/**
	 * Scans BASE grouped by class dimension, rebuilds the in-memory caches
	 * (actCount: documents per datasource; classP: documents per class) and
	 * persists them into METASOURCE (MINFREQUENCY initialized to 0, later set
	 * by takeMiniF) and METABASE. Callers are expected to have cleared the
	 * caches and meta tables first (see retrainningData()).
	 */
	private void takeDataSource() {
		String allsql = "SELECT  DATASOURCE,POSITION,TAG,NAME, count(*) AS C FROM BASE GROUP BY DATASOURCE,POSITION,TAG,NAME ;";
		SqlRowSet rs = jdbcTemplate.queryForRowSet(allsql);
		while (rs.next()) {
			String dataSource = rs.getString("DATASOURCE");
			String position = rs.getString("POSITION");
			String tag = rs.getString("TAG");
			String name = rs.getString("NAME");
			long c = rs.getInt("C");
			// Datasource total = sum of its per-class counts.
			actCount.merge(dataSource, c, Long::sum);
			classP.put(new DimData(dataSource, position, tag, name), c);
		}
		String sourceSql = "INSERT INTO METASOURCE(DATASOURCE,MINFREQUENCY,SOURCECOUNT) VALUES(?,?,?);";
		for (Map.Entry<String, Long> entry : actCount.entrySet()) {
			jdbcTemplate.update(sourceSql, entry.getKey(), 0.0f, entry.getValue());
		}
		String metaSql = "INSERT INTO METABASE(DATASOURCE,POSITION,TAG,NAME,CLASSCOUNT) VALUES(?,?,?,?,?);";
		for (Map.Entry<DimData, Long> entry : classP.entrySet()) {
			DimData dimData = entry.getKey();
			jdbcTemplate.update(metaSql, dimData.getDataSource(), dimData.getPosition(), dimData.getTag(),
					dimData.getName(), entry.getValue());
		}
	}

	/**
	 * Aggregates, for one datasource, each keyword's total WORDCOUNT across
	 * all classes — the document-frequency input for updateIDF().
	 *
	 * Fix: the datasource was concatenated into the SQL; now parameterized.
	 *
	 * @param source datasource to aggregate
	 * @return keyword -> summed document count, in query order
	 */
	private Map<String, Long> wordIDF(String source) {
		Map<String, Long> idf_map = new LinkedHashMap<>();
		String allsql = "SELECT  KEYWORD, SUM(WORDCOUNT) AS C FROM CLASSIFY WHERE DATASOURCE=?  GROUP BY  KEYWORD;";
		SqlRowSet rs = jdbcTemplate.queryForRowSet(allsql, source);
		while (rs.next()) {
			String keyword = rs.getString("KEYWORD");
			idf_map.put(keyword, (long) rs.getInt("C"));
		}
		return idf_map;
	}

	/**
	 * Applies IDF weighting: multiplies each keyword's stored FREQUENCY by
	 * log(allcount / (df + 1)), where df is the keyword's document frequency
	 * from wordIDF().
	 *
	 * Fixes: (1) the keyword — which originates from segmented user text —
	 * was concatenated into the SQL (injection / broken statement on quotes);
	 * now parameterized. (2) the UPDATE ignored the source parameter and
	 * rescaled rows of EVERY datasource sharing the keyword; it now filters on
	 * DATASOURCE, matching the per-source IDF being applied.
	 *
	 * @param idf_map  keyword -> document frequency for this datasource
	 * @param source   datasource whose rows are rescaled
	 * @param allcount total documents in the datasource
	 */
	private void updateIDF(Map<String, Long> idf_map, String source, long allcount) {
		String updateSql = "UPDATE CLASSIFY SET FREQUENCY = FREQUENCY * ? WHERE DATASOURCE=? AND KEYWORD=?;";
		for (Map.Entry<String, Long> entry : idf_map.entrySet()) {
			long smoothedDf = entry.getValue() + 1L; // +1 avoids log(allcount/0)
			double idf = Math.log((double) allcount / (double) smoothedDf);
			jdbcTemplate.update(updateSql, idf, source, entry.getKey());
		}
	}

	/**
	 * Computes a datasource's minimum stored FREQUENCY (the fallback weight
	 * for unseen keywords), caches it in minF and persists it to METASOURCE.
	 *
	 * Fix: both statements previously concatenated the source name into the
	 * SQL; now parameterized.
	 *
	 * @param source datasource to update
	 */
	private void takeMiniF(String source) {
		String mixsql = "SELECT min(FREQUENCY) AS nimF FROM CLASSIFY WHERE DATASOURCE=?;";
		SqlRowSet rs = jdbcTemplate.queryForRowSet(mixsql, source);
		while (rs.next()) {
			double d = rs.getDouble("nimF");
			minF.put(source, d);
			jdbcTemplate.update("UPDATE METASOURCE SET MINFREQUENCY = ? WHERE DATASOURCE=?;", d, source);
		}
	}

	/**
	 * Rebuilds the whole model from the raw documents in BASE: clears the
	 * derived tables and in-memory caches, recounts documents per datasource
	 * and per class, recomputes each class's keyword frequencies, applies IDF
	 * weighting per datasource and records each datasource's minimum
	 * frequency. The steps are order-dependent.
	 *
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#retrainningData()
	 */
	@Override
	public void retrainningData() {
		deleteClassifyTable();
		minF.clear();
		actCount.clear();
		classP.clear();
		takeDataSource();
		// takeDataSource() has just repopulated classP and actCount.
		for (DimData classname : classP.keySet()) {
			classGroup(classname, classP.get(classname));
		}
		for (String s : actCount.keySet()) {
			Map<String, Long> idf_map = wordIDF(s);
			updateIDF(idf_map, s, actCount.get(s));
			takeMiniF(s);
		}
		// idfLimit();
	}

	/**
	 * Returns all known class dimensions. NOTE(review): this exposes the live
	 * key set of the internal classP map — callers must not mutate it; confirm
	 * no caller relies on live updates before switching to a defensive copy.
	 *
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#allClass()
	 */
	@Override
	public Set<DimData> allClass() {
		return classP.keySet();
	}

	/**
	 * Comparator for ClassifyResult ordering results by DESCENDING probability
	 * (highest first), used by the TreeMap built in classifies().
	 *
	 * Fix: replaced the manual {@code <}/{@code >} comparison with
	 * Float.compare, which additionally yields a consistent total order for
	 * NaN and -0.0 (the manual version returned 0 for any NaN operand).
	 */
	private int compare(ClassifyResult p1, ClassifyResult p2) {
		// Arguments swapped to sort high probabilities first.
		return Float.compare(p2.probility, p1.probility);
	}

	/**
	 * Builds the SELECT that looks up a keyword's FREQUENCY for one class
	 * dimension in CLASSIFY.
	 *
	 * WARNING(review): all values are concatenated into the SQL without
	 * escaping; a single quote in keyword (which comes from segmented
	 * user-supplied text) breaks the statement and permits SQL injection —
	 * this should be replaced with a parameterized query.
	 *
	 * @param dimData class dimension to match
	 * @param keyword keyword to match
	 * @return the assembled SQL string
	 */
	private String calculatePxcSQL(DimData dimData,String keyword){
		 StringBuffer sb=new StringBuffer();
		sb.append("SELECT FREQUENCY FROM CLASSIFY ");
		sb.append(" WHERE ");
		sb.append(" DATASOURCE= ");
		sb.append("'" + dimData.getDataSource() + "'");
		sb.append(" AND ");
		sb.append(" POSITION= ");
		sb.append("'" + dimData.getPosition() + "'");
		sb.append(" AND ");
		sb.append(" TAG= ");
		sb.append("'" + dimData.getTag() + "'");
		sb.append(" AND ");
		sb.append(" NAME= ");
		sb.append("'" + dimData.getName() + "'");
		sb.append(" AND ");
		sb.append(" KEYWORD= ");
		sb.append("'" + keyword + "'");
		sb.append(" ; ");
		return sb.toString();
	}
	/**
	 * Tests whether a candidate class dimension belongs to the requested
	 * classification context: datasource, position and tag must all match.
	 * NAME is deliberately ignored — it is the label being scored.
	 */
	private boolean filterClassName(DimData dimData, String dataSource, String position, String tag) {
		return dimData.getDataSource().equals(dataSource)
				&& dimData.getPosition().equals(position)
				&& dimData.getTag().equals(tag);
	}
	/**
	 * Looks up P(keyword | class): the stored FREQUENCY for this keyword and
	 * class dimension, or {@code limit} (the caller's fallback weight for
	 * unseen keywords) when no row exists.
	 *
	 * Fix: the lookup previously concatenated the keyword — segmented from
	 * user-supplied text — into the SQL string (injection / broken statement
	 * on quotes); now parameterized.
	 *
	 * @param keyword keyword to look up
	 * @param dimData class dimension
	 * @param limit   fallback weight when the keyword is unseen in this class
	 * @return the stored frequency, or limit if absent
	 */
	private float calculatePxc(String keyword, DimData dimData, float limit) {
		String sql = "SELECT FREQUENCY FROM CLASSIFY WHERE DATASOURCE=? AND POSITION=? AND TAG=? AND NAME=? AND KEYWORD=?;";
		List<Map<String, Object>> resList = jdbcTemplate.queryForList(sql, dimData.getDataSource(),
				dimData.getPosition(), dimData.getTag(), dimData.getName(), keyword);
		if (resList == null || resList.isEmpty()) {
			return limit;
		}
		// Cast through Number: the driver may surface FLOAT as Double or Float.
		Number frequency = (Number) resList.get(0).get("FREQUENCY");
		return frequency.floatValue();
	}
	/**
	 * Scores one class for a keyword list using a scaled naive-Bayes product:
	 * product over keywords of P(word|class) * zoomFactor, multiplied by the
	 * class prior docCount/allcount. Keywords unseen in the class fall back to
	 * one tenth of the datasource's minimum stored frequency; if more than 35%
	 * of the keywords are unseen the class is rejected outright (returns 0).
	 *
	 * NOTE(review): actCount.get(...) is unboxed and minF.get(...) is
	 * dereferenced without null checks — an unknown datasource would NPE here;
	 * presumably callers guarantee the caches are populated. Confirm.
	 *
	 * @param X       distinct filtered keywords from the input text
	 * @param dimData class dimension being scored
	 * @return the class score, or 0 when the class cannot match
	 */
	private float calcProd(List<String>  X, DimData dimData) {
		float ret = 1.0F;
		long allcount=actCount.get(dimData.getDataSource());
		Long docCountL=classP.get(dimData);
		if(docCountL==null) {
			return 0.0f;
		}
		long docCount=docCountL.longValue();
		Double r_t=minF.get(dimData.getDataSource());
		//float ret_t =  1.0f/(float)(Integer.MAX_VALUE);
		// Fallback weight for keywords unseen in this class.
		float ret_t =  r_t.floatValue()/10.0f;
		// s counts the keywords that fell back to the unseen-keyword weight.
		int s=0;
		for(String word:X) {
			float f=calculatePxc(word,dimData,ret_t);
			if(f==ret_t) {
				s++;
				logger.info("[#Evaluate#] keyword={},weight={},###=false",word,f*zoomFactor);
			}else {
				logger.info("[#Evaluate#] keyword={},weight={}",word,f*zoomFactor);
			}
			ret*=f*zoomFactor;
		}
		if(X.isEmpty()) {
			return 0.0f;
		}
		float xsize=(float)(X.size());
		float xsize_fc= (float)s / xsize;
		// If the share of unmatched keywords exceeds 0.35, the text does not
		// meet the basic requirements of this class (was: 如果不能匹配的关键字
		// 比例大于0.35，说明不符合这个类别的基本要求).
		if(xsize_fc>0.35f) {
			logger.info("[#Evaluate#] prior =0.0");
			return 0.0f;
		}
		logger.info("[#Evaluate#] prior ={}",(float)docCount/(float)allcount);
		// Multiply in the class prior P(class) = docCount / allcount.
		ret*=(float)docCount/(float)allcount;
		return ret;
	}
	/**
	 * Classifies the text and renders the best class as
	 * "[dataSource]_[position]_[tag]_[bestClassName]", or "" when no class
	 * matched.
	 *
	 * Bug fix: classifies() returns null when no model is loaded (actCount is
	 * empty); the original dereferenced that null with weightMap.size() and
	 * threw a NullPointerException. A null guard now returns "" instead.
	 *
	 * @param dataSource datasource to classify against
	 * @param position   position dimension
	 * @param tag        tag dimension
	 * @param text       text to classify
	 * @return formatted best classification, or "" when none
	 */
	@Override
	public String classify(String dataSource, String position, String tag, String text) {
		TreeMap<ClassifyResult, String> weightMap = classifies(dataSource, position, tag, text);
		if (weightMap == null || weightMap.isEmpty()) {
			return "";
		}
		// firstKey() is the highest-probability result (descending comparator).
		return "[" + dataSource + "]_[" + position + "]_[" + tag + "]_[" + weightMap.firstKey().classification + "]";
	}
	/**
	 * Scores every class of the requested (dataSource, position, tag) context
	 * against the text and returns them ordered by descending probability.
	 * Classes that score 0 are omitted.
	 *
	 * NOTE(review): returns {@code null} (not an empty map) when the model is
	 * empty even after reloading the caches — callers must null-check.
	 *
	 * @param dataSource datasource to classify against
	 * @param position   position dimension
	 * @param tag        tag dimension
	 * @param text       text to classify
	 * @return results ordered best-first, or null when no model is loaded
	 * @see com.jx.gocom.nlp.classify.webservice.service.ClassifyServiceDim#classifies(java.lang.String, java.lang.String, java.lang.String, java.lang.String)
	 */
	@Override
	public TreeMap<ClassifyResult, String> classifies(String dataSource, String position, String tag, String text) {
		TreeMap<ClassifyResult, String> weightMap = new TreeMap<>(this::compare);
		// Lazily reload the caches from the meta tables if they are empty.
		if (actCount.size() == 0 || classP.size() == 0) {
			idfLimitAndAllcount();
		}
		if (actCount.size() == 0) {
			return null;
		}
		List<Term> terms = seg.seg(text);
		List<String> terms_dis = terms.stream().filter(this::filtWord).map(t -> t.word).distinct()
				.collect(Collectors.toList());
		logger.info("[#Evaluate#] [start] ... classify text={}",text);
		// Candidate classes: those matching the requested datasource/position/tag.
		List<DimData> names=classP.keySet().stream().filter(d->this.filterClassName(d,dataSource,position,tag)).collect(Collectors.toList());
		logger.info("[#Evaluate#] [DimData] ... classify path={}",text);
		for (DimData dimData : names) {
			String className=dimData.getName();
			logger.info("[#Evaluate#]  classify  path={}",dimData.toString());
			ClassifyResult cr = new ClassifyResult();
			cr.classification = className;
			cr.probility = calcProd(terms_dis, dimData);
			logger.info("[#Evaluate#] probility ={}", cr.probility);
			if (cr.probility > 0.0f) {
				weightMap.put(cr, "");
			} else {
				logger.info("[#Evaluate#] {} is Empty!!!", dimData.toString());
			}
		}
		logger.info("[#Evaluate#] [end] ... classify text={}", text);
		return weightMap;
	}

}
