package guoanmaker.operator.business.service;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.transaction.Transactional;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import guoanmaker.operator.business.dao.OperatorSensitiveWordDao;
import guoanmaker.operator.business.model.OperatorSensitiveWord;
import guoanmaker.operator.business.model.ReturnResult;
import guoanmaker.operator.business.repository.OperatorSensitiveWordRepository;
import guoanmaker.operator.helper.common.util.SensitiveWordInit;
import guoanmaker.operator.helper.common.util.SensitivewordEngine;

@Service
public class OperatorSensitiveWordService implements OperatorSensitiveWordServiceInterface{

	@Autowired
	private OperatorSensitiveWordRepository operatorSensitiveWordRepository;
	
	@Autowired
	private OperatorSensitiveWordDao operatorSensitiveWordDao;
	
	/**
	 * Scans the given text and returns every sensitive word found in it.
	 * <p>
	 * The word list is loaded from the database and the matching engine's
	 * dictionary is rebuilt on every call.
	 * NOTE(review): rebuilding the dictionary per request is O(word-count) work
	 * each time — consider caching it if the word table changes rarely; TODO confirm.
	 *
	 * @param text the text to scan; {@code null} or empty yields an empty set
	 * @return the set of sensitive words detected (never {@code null})
	 */
	@Override
	public Set<String> sensitiveWordVerification(String text){
		// Guard: nothing to scan for null or empty input.
		if(text == null || text.isEmpty()){
			return new HashSet<String>(0);
		}
		System.out.println("共有："+text.length()+"字符");
		// Primitive long suffices for elapsed-time math; avoids Long boxing.
		long t1 = System.currentTimeMillis();
		SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
		// Load all sensitive-word entities from the database.
		List<OperatorSensitiveWord> sensitiveWords = operatorSensitiveWordRepository.findAll();
		// Build the dictionary and hand it straight to the engine's static field.
		// Assigning directly avoids declaring a raw-typed local Map (the exact
		// map type is owned by SensitiveWordInit/SensitivewordEngine).
		SensitivewordEngine.sensitiveWordMap = sensitiveWordInit.initKeyWord(sensitiveWords);
		long t2 = System.currentTimeMillis();
		System.out.println("构建敏感词库所用时间："+(t2-t1)+"毫秒");
		// matchType 2 = return all sensitive words found in the text
		// (per the engine's convention — see SensitivewordEngine).
		Set<String> set = SensitivewordEngine.getSensitiveWord(text, 2);
		long t3 = System.currentTimeMillis();
		System.out.println("过滤所用时间："+(t3-t2)+"毫秒");
		return set;
	}
	
	/**
	 * Creates or updates a sensitive-word record in t_operator_sensitiveword,
	 * rejecting duplicates of an existing word.
	 *
	 * @param word the sensitive word; must be non-null and non-blank
	 * @param id   the record id; {@code null}/blank means "create new"
	 * @return a ReturnResult with key "success" on success, "error" otherwise
	 */
	@Override
	@Transactional
	public ReturnResult saveOrUpdateSensitiveWord(String word, String id) {
		ReturnResult returnResult = new ReturnResult();
		// Reject null/blank words up front.
		if(word == null || word.trim().length() == 0){
			returnResult.setKey("error");
			returnResult.setValue("敏感字为空");
			return returnResult;
		}
		// Count records with the same word, excluding the record being edited (id).
		long count = operatorSensitiveWordDao.countSameWord(word, id);
		if(count > 0){
			returnResult.setKey("error");
			returnResult.setValue("敏感字重复");
			return returnResult;
		}
		
		if(id == null || id.trim().length() == 0){
			// No id supplied: insert a new record.
			operatorSensitiveWordRepository.save(new OperatorSensitiveWord(word.trim()));
		} else{
			OperatorSensitiveWord existing = operatorSensitiveWordRepository.findOne(id);
			// BUGFIX: findOne returns null for an unknown id; the original code
			// dereferenced the result unconditionally and would throw an NPE.
			if(existing == null){
				returnResult.setKey("error");
				returnResult.setValue("敏感字不存在");
				return returnResult;
			}
			existing.setSensitivewordName(word.trim());
			// Explicit save: the original relied solely on JPA dirty checking
			// within the transaction; saving makes the intent unambiguous.
			operatorSensitiveWordRepository.save(existing);
		}
		returnResult.setKey("success");
		returnResult.setValue("操作成功");
		return returnResult;
	}
	
	/**
	 * Deletes the sensitive-word record with the given id.
	 *
	 * @param id the record id; must be non-null and non-blank
	 * @return a ReturnResult with key "success" on success, "error" otherwise
	 */
	@Override
	@Transactional
	public ReturnResult deleteSensitiveWord(String id) {
		ReturnResult returnResult = new ReturnResult();
		// Reject null/blank ids up front.
		if(id == null || id.trim().length() == 0){
			returnResult.setKey("error");
			returnResult.setValue("敏感字id为空");
			return returnResult;
		}
		
		operatorSensitiveWordRepository.delete(id);
		returnResult.setKey("success");
		returnResult.setValue("操作成功");
		return returnResult;
	}
	
	/**
	 * Fuzzy-searches sensitive words with pagination.
	 *
	 * @param word the word fragment to match
	 * @param page zero-based page index
	 * @param size number of records per page
	 * @return a map with "total" (matching record count) and "list" (the page of records)
	 */
	@Override
	public Map<String, Object> findByWord(String word, String page, String size) {
		Map<String, Object> resultMap = new HashMap<String, Object>();
		resultMap.put("total", operatorSensitiveWordDao.totalLikeWord(word));
		resultMap.put("list", operatorSensitiveWordDao.findByWord(word, page, size));
		return resultMap;
	}
}
