package com.ukefu.util.sensitive;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;

import com.ukefu.core.UKDataContext;
import com.ukefu.util.UKTools;
import com.ukefu.webim.service.repository.SensitiveWordRepository;
import com.ukefu.webim.service.repository.TenantRepository;
import com.ukefu.webim.web.model.SensitiveWord;
import com.ukefu.webim.web.model.Tenant;
import com.ukefu.webim.web.model.WorldNode;

/**
 * DFA（确定有限自动机）算法判断文本是否包含敏感词
 * 
 * @author iceworld
 *
 */
public class SensitiveFilter {

	/** Per-tenant trie of sensitive words: orgi -> (first char code -> trie node). */
	private Map<String, Map<Integer, WorldNode>> sensitiveWorldMap = new HashMap<String, Map<Integer, WorldNode>>();

	/** Per-tenant, per-type tries (USER / AGENT): orgi -> type -> (first char code -> trie node). */
	private Map<String, Map<String, Map<Integer, WorldNode>>> tsensitiveWorldMap = new HashMap<String, Map<String, Map<Integer, WorldNode>>>();

	/** Character written over every matched sensitive word. */
	private static final char SIGN = '*';

	/** Eagerly created singleton instance. */
	private static final SensitiveFilter sensitive = new SensitiveFilter();

	/**
	 * @return the shared singleton filter instance
	 */
	public static SensitiveFilter getSensitiveFilter() {
		return sensitive;
	}

	/**
	 * Reloads ALL sensitive words from the database into the per-tenant trie.
	 */
	public void initSensitiveWords() {
		SensitiveWordRepository wordsRes = UKDataContext.getContext().getBean(SensitiveWordRepository.class);
		// FIX: the original called addtSensitiveWorlds(...) here, which fed the
		// typed (t-) map and left sensitiveWorldMap permanently empty; the orgi
		// overload below correctly uses addSensitiveWorlds, so this one does too.
		this.addSensitiveWorlds(wordsRes.findAll());
	}

	/**
	 * Reloads the sensitive words of a single tenant into the per-tenant trie.
	 *
	 * @param orgi tenant identifier
	 */
	public void initSensitiveWords(String orgi) {
		Map<Integer, WorldNode> existing = this.sensitiveWorldMap.get(orgi);
		if (existing != null && existing.size() > 0) {
			// Drop stale words before reloading; addSensitiveWorlds refills this map.
			existing.clear();
		}
		SensitiveWordRepository wordsRes = UKDataContext.getContext().getBean(SensitiveWordRepository.class);
		this.addSensitiveWorlds(wordsRes.findByOrgi(orgi));
	}

	/**
	 * Inserts the given words into each word's tenant trie. For every record the
	 * keyword, superordinate and content columns are each treated as
	 * comma-separated word lists.
	 *
	 * @param words sensitive-word records to index
	 */
	public void addSensitiveWorlds(List<SensitiveWord> words) {
		for (SensitiveWord ucword : words) {
			// Local trie reference: the original routed this through a shared
			// instance field, which is unsafe when filtering runs concurrently.
			Map<Integer, WorldNode> trie = this.sensitiveWorldMap.get(ucword.getOrgi());
			if (trie == null) {
				trie = new HashMap<Integer, WorldNode>();
			}
			// Blank fields are skipped; the original concatenated them with
			// commas, turning null columns into the literal word "null" and
			// crashing on empty tokens such as "a,,b".
			insertWords(trie, ucword.getKeyword(), ucword.getOrgi(), ucword.getType());
			insertWords(trie, ucword.getSuperordinate(), ucword.getOrgi(), ucword.getType());
			insertWords(trie, ucword.getContent(), ucword.getOrgi(), ucword.getType());
			this.sensitiveWorldMap.put(ucword.getOrgi(), trie);
		}
	}

	/**
	 * Replaces every sensitive word found in {@code source} with {@link #SIGN}.
	 *
	 * @param source text to filter (must not be null)
	 * @param orgi   tenant identifier
	 * @param type   word type a multi-character match must carry
	 * @return the masked text
	 */
	public String doFilter(String source, String orgi, String type) {
		return maskSensitive(this.sensitiveWorldMap.get(orgi), source, orgi, type);
	}

	/**
	 * @return true when {@code source} contains at least one sensitive word of
	 *         the given tenant/type
	 */
	public boolean hasSensitiveWords(String source, String orgi, String type) {
		return containsSensitive(this.sensitiveWorldMap.get(orgi), source, orgi, type);
	}

	/**
	 * 分类敏感词（坐席访客敏感词专用）— (re)initializes the typed tries for all
	 * tenants and loads every word from the database.
	 */
	public void inittSensitiveWords() {
		if (UKTools.getSystemConfig().isEnabletneant()) {
			TenantRepository tenantRepository = UKDataContext.getContext().getBean(TenantRepository.class);
			List<Tenant> tenantList = tenantRepository.findAll();
			if (tenantList != null) {
				for (Tenant tenant : tenantList) {
					if (!StringUtils.isBlank(tenant.getTenantcode())) {
						// Unconditional put: resets any previously loaded tenant trie.
						tsensitiveWorldMap.put(tenant.getTenantcode(), newTypeMap());
					}
				}
			}
		}
		if (tsensitiveWorldMap.get(UKDataContext.SYSTEM_ORGI) == null) {
			tsensitiveWorldMap.put(UKDataContext.SYSTEM_ORGI, newTypeMap());
		}

		SensitiveWordRepository wordsRes = UKDataContext.getContext().getBean(SensitiveWordRepository.class);
		this.addtSensitiveWorlds(wordsRes.findAll());
	}

	/**
	 * 分类敏感词（坐席访客敏感词专用）— reloads the typed tries of one tenant.
	 *
	 * @param orgi tenant identifier
	 */
	public void inittSensitiveWords(String orgi) {
		Map<String, Map<Integer, WorldNode>> typeMap = this.tsensitiveWorldMap.get(orgi);
		if (typeMap != null && typeMap.size() > 0) {
			// Drop stale words; addtSensitiveWorlds recreates the per-type tries.
			typeMap.clear();
		}
		if (this.tsensitiveWorldMap.get(orgi) == null) {
			this.tsensitiveWorldMap.put(orgi, newTypeMap());
		}
		SensitiveWordRepository wordsRes = UKDataContext.getContext().getBean(SensitiveWordRepository.class);
		this.addtSensitiveWorlds(wordsRes.findByOrgi(orgi));
	}

	/**
	 * 分类敏感词（坐席访客敏感词专用）— inserts words into each record's
	 * tenant + type trie. Only the keyword column is indexed here, unlike
	 * {@link #addSensitiveWorlds(List)}.
	 *
	 * @param words sensitive-word records to index
	 */
	public void addtSensitiveWorlds(List<SensitiveWord> words) {
		for (SensitiveWord ucword : words) {
			Map<String, Map<Integer, WorldNode>> typeMap = this.tsensitiveWorldMap.get(ucword.getOrgi());
			if (typeMap == null) {
				typeMap = newTypeMap();
				this.tsensitiveWorldMap.put(ucword.getOrgi(), typeMap);
			}
			Map<Integer, WorldNode> trie = typeMap.get(ucword.getType());
			if (trie == null) {
				trie = new HashMap<Integer, WorldNode>();
			}
			insertWords(trie, ucword.getKeyword(), ucword.getOrgi(), ucword.getType());
			typeMap.put(ucword.getType(), trie);
		}
	}

	/**
	 * 分类敏感词（坐席访客敏感词专用）— masks sensitive words using the typed trie.
	 *
	 * @return the masked text; {@code source} unchanged when the tenant has no tries
	 */
	public String dotFilter(String source, String orgi, String type) {
		Map<String, Map<Integer, WorldNode>> typeMap = this.tsensitiveWorldMap.get(orgi);
		if (typeMap == null) {
			return source;
		}
		return maskSensitive(typeMap.get(type), source, orgi, type);
	}

	/**
	 * 分类敏感词（坐席访客敏感词专用）
	 *
	 * @return true when {@code source} contains a typed sensitive word
	 */
	public boolean hastSensitiveWords(String source, String orgi, String type) {
		Map<String, Map<Integer, WorldNode>> typeMap = this.tsensitiveWorldMap.get(orgi);
		if (typeMap == null) {
			return false;
		}
		return containsSensitive(typeMap.get(type), source, orgi, type);
	}

	/** Builds a fresh type map pre-populated with empty USER and AGENT tries. */
	private static Map<String, Map<Integer, WorldNode>> newTypeMap() {
		Map<String, Map<Integer, WorldNode>> typeMap = new HashMap<String, Map<Integer, WorldNode>>();
		typeMap.put(UKDataContext.SensitiveWordType.USER.toString(), new HashMap<Integer, WorldNode>());
		typeMap.put(UKDataContext.SensitiveWordType.AGENT.toString(), new HashMap<Integer, WorldNode>());
		return typeMap;
	}

	/** Splits a comma-separated word list and inserts every non-blank token. */
	private static void insertWords(Map<Integer, WorldNode> trie, String csv, String orgi, String type) {
		if (StringUtils.isBlank(csv)) {
			return;
		}
		for (String key : csv.split(",")) {
			if (!StringUtils.isBlank(key)) {
				insertWord(trie, key, orgi, type);
			}
		}
	}

	/** Inserts one word into the trie, half-width-normalizing each character. */
	private static void insertWord(Map<Integer, WorldNode> trie, String word, String orgi, String type) {
		char[] chars = word.toCharArray();
		int first = BCConvert.qj2bj(chars[0]);
		WorldNode node = trie.get(first);
		if (node == null) {
			node = new WorldNode(first, chars.length == 1, orgi, type);
			trie.put(first, node);
		} else if (node.isLeaf()) {
			// NOTE(review): preserved from the original — inserting a longer word
			// that shares a first character clears the single-char word's leaf
			// flag; looks suspicious but callers may rely on it. TODO confirm.
			node.setLeaf(chars.length == 1);
		}
		for (int i = 1; i < chars.length; i++) {
			node = node.addChildNode(BCConvert.qj2bj(chars[i]), i == chars.length - 1, orgi, type);
		}
	}

	/** Trie scan that overwrites every match in {@code source} with SIGN. */
	private static String maskSensitive(Map<Integer, WorldNode> trie, String source, String orgi, String type) {
		char[] chars = source.toCharArray();
		if (trie != null) {
			for (int i = 0; i < chars.length; i++) {
				WorldNode node = trie.get(BCConvert.qj2bj(chars[i]));
				if (node == null) {
					continue;
				}
				int end = i;
				boolean matched = false;
				if (node.isLeaf()) {
					// Single-character word: masked without the orgi/type check,
					// matching the original behavior (root nodes store only the
					// first inserter's orgi/type, so checking would be unreliable).
					chars[end] = SIGN;
				}
				while (++end < chars.length) {
					node = node.queryChildNode(BCConvert.qj2bj(chars[end]));
					if (node == null) {
						break;
					}
					if (node.isLeaf() && node.getOrgi().equals(orgi) && node.getType().equals(type)) {
						matched = true;
						break;
					}
				}
				if (matched) {
					for (int k = i; k <= end; k++) {
						chars[k] = SIGN;
					}
					// Resume scanning after the masked span.
					i = end;
				}
			}
		}
		return new String(chars);
	}

	/** Trie scan that only reports whether any sensitive word occurs. */
	private static boolean containsSensitive(Map<Integer, WorldNode> trie, String source, String orgi, String type) {
		if (trie == null) {
			return false;
		}
		char[] chars = source.toCharArray();
		for (int i = 0; i < chars.length; i++) {
			WorldNode node = trie.get(BCConvert.qj2bj(chars[i]));
			if (node == null) {
				continue;
			}
			// Single-character word: reported without the orgi/type check,
			// matching the original behavior.
			if (node.isLeaf()) {
				return true;
			}
			int end = i;
			while (++end < chars.length) {
				node = node.queryChildNode(BCConvert.qj2bj(chars[end]));
				if (node == null) {
					break;
				}
				if (node.isLeaf() && node.getOrgi().equals(orgi) && node.getType().equals(type)) {
					return true;
				}
			}
		}
		return false;
	}

	public static void main(String[] args) {
		// Manual smoke test placeholder: build a SensitiveFilter, call
		// addSensitiveWorlds(...) with a few records, then exercise
		// doFilter(...) / hasSensitiveWords(...).
	}
}