package com.gack.business.service;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.transaction.Transactional;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import com.gack.business.dao.SensitiveWordDao;
import com.gack.business.model.SensitiveWord;
import com.gack.business.repository.SensitiveWordRepository;
import com.gack.helper.common.abstractobj.Result;
import com.gack.helper.common.util.SensitiveWordInit;
import com.gack.helper.common.util.SensitivewordEngine;

@Service
public class SensitiveWordService implements SensitiveWordServiceInterface{

	@Autowired
	private SensitiveWordRepository sensitiveWordRepository;
	
	@Autowired
	private SensitiveWordDao sensitiveWordDao;
	
	@Autowired
	private BackLogServiceInterface backLogService;
	
	/**
	 * Scans the given text and returns the set of sensitive words it contains.
	 *
	 * @param text text to scan; {@code null} is treated as empty
	 * @return the matched sensitive words (empty set when text is blank)
	 */
	@Override
	public Set<String> sensitiveWordVerification(String text){
		text = text == null ? "" : text;
		if(text.isEmpty()){
			return new HashSet<String>(0);
		}
		System.out.println("共有："+text.length()+"字符");
		Long t1 = System.currentTimeMillis();
		SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
		// Load all sensitive-word entities from the database.
		List<SensitiveWord> sensitiveWords = sensitiveWordRepository.findAll();
		// Build the DFA word map from the entities.
		// NOTE(review): raw Map kept to match SensitivewordEngine's field type.
		Map sensitiveWordMap = sensitiveWordInit.initKeyWord(sensitiveWords);
		Long t2 = System.currentTimeMillis();
		System.out.println("构建敏感词库所用时间："+(t2-t1)+"毫秒");
		// NOTE(review): this static field is shared mutable state — concurrent
		// callers may overwrite each other's word map; confirm single-threaded use.
		SensitivewordEngine.sensitiveWordMap = sensitiveWordMap;
		// Match type 2 = return every sensitive word found in the text.
		Set<String> set = SensitivewordEngine.getSensitiveWord(text, 2);
		Long t3 = System.currentTimeMillis();
		System.out.println("过滤所用时间："+(t3-t2)+"毫秒");
		return set;
	}
	
	/**
	 * Saves a new sensitive word, or updates an existing one, in
	 * t_operator_sensitiveword. Duplicate words are rejected.
	 *
	 * @param userid operator id, recorded in the back-office log
	 * @param word   the sensitive word (must be non-blank)
	 * @param id     existing record id for update; blank/null to create
	 * @return Result with key "success" or "error" and a message value
	 */
	@Override
	@Transactional
	public Result saveOrUpdateSensitiveWord(String userid,String word, String id) {
		Result returnResult = new Result();
		if(word == null || word.trim().length() == 0){
			returnResult.setKey("error");
			returnResult.setValue("敏感字为空");
			return returnResult;
		}
		// Count other records with the same word (excluding this id) to enforce uniqueness.
		long count = sensitiveWordDao.countSameWord(word, id);
		if(count > 0){
			returnResult.setKey("error");
			returnResult.setValue("敏感字重复");
			return returnResult;
		}
		
		if(id == null || id.trim().length() == 0){
			// Create a new record.
			SensitiveWord operatorSensitiveWord = new SensitiveWord();
			operatorSensitiveWord.setSensitivewordName(word.trim());
			operatorSensitiveWord.setCreate_time(new Date());
			operatorSensitiveWord = sensitiveWordRepository.save(operatorSensitiveWord);
			String log = "添加新的敏感词>>";
			log += " 敏感词id："+operatorSensitiveWord.getId();
			log += " 敏感词名称："+operatorSensitiveWord.getSensitivewordName();
			backLogService.addBackLog(userid, log);
		} else{
			// Update an existing record; guard against a stale/invalid id
			// (findOne returns null when the record does not exist).
			SensitiveWord operatorSensitiveWord = sensitiveWordRepository.findOne(id);
			if(operatorSensitiveWord == null){
				returnResult.setKey("error");
				returnResult.setValue("敏感词不存在");
				return returnResult;
			}
			String old_word = operatorSensitiveWord.getSensitivewordName();
			operatorSensitiveWord.setSensitivewordName(word.trim());
			// Save explicitly rather than relying on JPA dirty checking.
			sensitiveWordRepository.save(operatorSensitiveWord);
			String log = "修改敏感词>>";
			log += " 敏感词id："+id;
			log += " 原敏感词："+old_word;
			log += " 新敏感词："+word.trim();
			backLogService.addBackLog(userid, log);
		}
		returnResult.setKey("success");
		returnResult.setValue("操作成功");
		return returnResult;
	}
	
	/**
	 * Deletes the sensitive-word record with the given id.
	 *
	 * @param userid operator id, recorded in the back-office log
	 * @param id     record id to delete
	 * @return Result with key "success" or "error" and a message value
	 */
	@Override
	@Transactional
	public Result deleteSensitiveWord(String userid,String id) {
		Result returnResult = new Result();
		if(id == null || id.trim().length() == 0){
			returnResult.setKey("error");
			returnResult.setValue("敏感字id为空");
			return returnResult;
		}
		SensitiveWord operatorSensitiveWord = sensitiveWordRepository.findOne(id);
		if(operatorSensitiveWord == null){
			returnResult.setKey("error");
			returnResult.setValue("敏感词不存在");
			return returnResult;
		}
		sensitiveWordRepository.delete(id);
		String log = "删除敏感词>>";
		log += " 敏感词id："+id;
		log += " 敏感词名称："+operatorSensitiveWord.getSensitivewordName();
		// BUGFIX: the detailed log built above was previously discarded and a
		// generic literal was logged instead.
		backLogService.addBackLog(userid, log);
		returnResult.setKey("success");
		returnResult.setValue("操作成功");
		return returnResult;
	}
	
	/**
	 * Fuzzy search by sensitive word with pagination.
	 *
	 * @param word search keyword
	 * @param page zero-based page index
	 * @param size records per page
	 * @return map with "total" (matching count) and "list" (current page rows)
	 */
	@Override
	public Map<String, Object> findByWord(String word, String page, String size) {
		Map<String, Object> resultMap = new HashMap<String, Object>();
		resultMap.put("total", sensitiveWordDao.totalLikeWord(word));
		resultMap.put("list", sensitiveWordDao.findByWord(word, page, size));
		return resultMap;
	}

	/**
	 * Imports sensitive words from an uploaded Excel file. The first row is
	 * treated as a header; each data row must have cells 0 and 1 populated,
	 * and the word itself is read from column 1. Words already present in the
	 * database (or repeated in the file) are skipped.
	 *
	 * @param request  current HTTP request (unused here, kept for the interface)
	 * @param response current HTTP response (unused here, kept for the interface)
	 * @param dataFile the uploaded .xls/.xlsx file
	 * @param userid   operator id, recorded in the back-office log
	 * @return Result describing success or a format error
	 * @throws Exception if the workbook cannot be read
	 */
	@Override
	public Result importSensitiveWord(HttpServletRequest request, HttpServletResponse response,
			MultipartFile dataFile,String userid) throws Exception {
		Result result = new Result();
		// BUGFIX: close the stream when done (previously leaked).
		try (InputStream in = dataFile.getInputStream()) {
			Workbook wb = WorkbookFactory.create(in);
			Sheet sheet = wb.getSheetAt(0);
			int totcount = sheet.getLastRowNum();
			Date now = new Date();
			List<SensitiveWord> sensitiveWords = new ArrayList<>();
			for(int i = 0; i < totcount; i++){
				Row row = sheet.getRow(i+1);
				// BUGFIX: getRow may return null for sparse sheets; treat a
				// missing row the same as missing cells.
				if(row == null || row.getCell(0) == null || row.getCell(1) == null){
					result.setKey("error");
					result.setValue(null);
					result.setMsg("格式错误，敏感词导入失败");
					return result;
				}
				SensitiveWord sensitiveWord = new SensitiveWord();
				row.getCell(0).setCellType(Cell.CELL_TYPE_STRING);
				row.getCell(1).setCellType(Cell.CELL_TYPE_STRING);
				String sensitivewordName = row.getCell(1).getStringCellValue().trim();
				sensitiveWord.setSensitivewordName(sensitivewordName);
				sensitiveWord.setCreate_time(now);
				sensitiveWords.add(sensitiveWord);
			}
			// Drop words that already exist in the database or repeat in the file.
			sensitiveWords = havingSensitiveWord(sensitiveWords);
			sensitiveWordRepository.save(sensitiveWords);
			backLogService.addBackLog(userid, "通过excel导入敏感字");
			result.setKey("success");
			result.setValue(null);
			result.setMsg("成功导入 "+sensitiveWords.size()+" 条敏感词");
			return result;
		}
	}
	
	/**
	 * Removes from {@code sensitiveWords} every entry already present in the
	 * database, as well as duplicates within the list itself.
	 *
	 * NOTE(review): dedup relies on SensitiveWord overriding equals/hashCode
	 * by word name — confirm in the entity class.
	 *
	 * @param sensitiveWords candidate words to import (mutated in place)
	 * @return the deduplicated list (same instance)
	 */
	List<SensitiveWord> havingSensitiveWord(List<SensitiveWord> sensitiveWords){
		long begin = System.currentTimeMillis();
		List<SensitiveWord> allWords = sensitiveWordRepository.findAll();
		Set<SensitiveWord> wordSet = new HashSet<>(allWords);
		List<SensitiveWord> removeWords = new ArrayList<>();
		for(SensitiveWord word : sensitiveWords){
			// BUGFIX: the old size-comparison only detected duplicates until the
			// first genuinely-new word was added (the baseline count was never
			// updated). Set.add returns false for any duplicate, regardless of
			// how many new words preceded it.
			if(!wordSet.add(word)){
				removeWords.add(word);
			}
		}
		System.out.println("导入词汇去重前数量   "+sensitiveWords.size());
		sensitiveWords.removeAll(removeWords);
		System.out.println("导入词汇去重后数量   "+sensitiveWords.size());
		long end = System.currentTimeMillis();
		System.out.println("导入敏感词去重运行时间   "+(end-begin));
		return sensitiveWords;
	}
}
