package com.bci.pwtz.service.impl;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.log4j.Logger;
import org.springframework.dao.DataAccessException;
import org.springframework.stereotype.Service;

import com.bci.pwtz.common.PwtzConstants;
import com.bci.pwtz.common.util.MutiPatternParser;
import com.bci.pwtz.common.util.PaginationList;
import com.bci.pwtz.exception.InvalidParametersException;
import com.bci.pwtz.exception.PwtzDatabaseException;
import com.bci.pwtz.mysql.dao.SensitiveWordMapper;
import com.bci.pwtz.mysql.model.SensitiveWord;
import com.bci.pwtz.service.BusinessRefService;
import com.bci.pwtz.service.SensitiveWordService;

@Service
public class SensitiveWordServiceImpl implements SensitiveWordService {

	private static final Logger _logger = Logger.getLogger(SensitiveWordServiceImpl.class);

	// Injected via setters (presumably XML-configured — no @Autowired in the original).
	private SensitiveWordMapper sensitiveWordMapper;
	private BusinessRefService businessDataUtilService;

	public SensitiveWordMapper getSensitiveWordMapper() {
		return sensitiveWordMapper;
	}

	public void setSensitiveWordMapper(SensitiveWordMapper sensitiveWordMapper) {
		this.sensitiveWordMapper = sensitiveWordMapper;
	}

	public BusinessRefService getBusinessDataUtilService() {
		return businessDataUtilService;
	}

	public void setBusinessDataUtilService(
			BusinessRefService businessDataUtilService) {
		this.businessDataUtilService = businessDataUtilService;
	}

	/**
	 * Persists a new sensitive word with status forced to 1 (active).
	 *
	 * @param sensitiveWord the word to insert; may be null, in which case nothing is inserted
	 * @return the inserted word (status set), or null when the argument was null
	 * @throws PwtzDatabaseException when the underlying insert fails
	 */
	@Override
	public SensitiveWord add(SensitiveWord sensitiveWord)
			throws PwtzDatabaseException {
		// BUGFIX: the null check must run BEFORE sensitiveWord.toString() is called,
		// otherwise the null path always threw a NullPointerException.
		if (sensitiveWord == null) {
			if (_logger.isDebugEnabled()) {
				_logger.debug("add,sensitiveWord is null");
			}
			return null;
		}
		if (_logger.isDebugEnabled()) {
			_logger.debug("add:sensitiveWord" + sensitiveWord.toString());
		}
		sensitiveWord.setStatus(1);
		try {
			sensitiveWordMapper.insertSelective(sensitiveWord);
		} catch (DataAccessException e) {
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		return sensitiveWord;
	}

	/**
	 * Deletes a sensitive word by primary key.
	 *
	 * @param sensitiveWordId primary key of the word to delete
	 * @return number of rows deleted (0 when no such row)
	 * @throws PwtzDatabaseException when the underlying delete fails
	 */
	@Override
	public int delete(Long sensitiveWordId) throws PwtzDatabaseException {
		int result = 0;
		if (_logger.isDebugEnabled()) {
			// Null-safe concatenation (no explicit toString()) — logging must not NPE.
			_logger.debug("delete:sensitiveWordId=" + sensitiveWordId);
		}
		try {
			result = sensitiveWordMapper.deleteById(sensitiveWordId);
		} catch (DataAccessException e) {
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		return result;
	}

	/**
	 * Updates a sensitive word (selective update of non-null fields).
	 * NOTE(review): "upadte" is a typo, but it is the interface method name and
	 * cannot be renamed without breaking callers.
	 *
	 * @param sensitiveWord the word to update; may be null, in which case 0 is returned
	 * @return number of rows updated
	 * @throws PwtzDatabaseException when the underlying update fails
	 */
	@Override
	public int upadte(SensitiveWord sensitiveWord) throws PwtzDatabaseException {
		// Null check first — the original logged sensitiveWord.toString() before checking.
		if (sensitiveWord == null) {
			if (_logger.isDebugEnabled()) {
				_logger.debug("update,sensitiveWord is null");
			}
			return 0;
		}
		if (_logger.isDebugEnabled()) {
			_logger.debug("update:sensitiveWord" + sensitiveWord.toString());
		}
		int result = 0;
		try {
			result = sensitiveWordMapper.updateByPrimaryKeySelective(sensitiveWord);
		} catch (DataAccessException e) {
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		return result;
	}

	/**
	 * Returns one page of sensitive words created between beginTime and endTime.
	 * When pageSize is 0, paging is disabled: the full result list is returned as a
	 * single page and the total count equals the list size.
	 *
	 * @param beginTime inclusive lower bound (format defined by the mapper SQL)
	 * @param endTime   inclusive upper bound
	 * @param pageSize  rows per page; 0 disables paging
	 * @param pageNo    1-based page index
	 * @return a PaginationList carrying the records plus paging metadata
	 * @throws PwtzDatabaseException when either the page query or the count query fails
	 */
	public PaginationList<SensitiveWord> selectSensitiveWords(String beginTime, String endTime, int pageSize, int pageNo) throws PwtzDatabaseException {
		PaginationList<SensitiveWord> sensitiveWords = new PaginationList<SensitiveWord>();
		int beginIndex = pageSize * (pageNo - 1);
		if (_logger.isInfoEnabled()) {
			_logger.info("selectSensitiveWord:|beginTime" + beginTime + "|endTime" + endTime);
		}
		List<SensitiveWord> list = null;
		try {
			list = sensitiveWordMapper.selectSensitiveWords(beginTime, endTime, beginIndex, pageSize);
		} catch (DataAccessException e) {
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		if (list != null && _logger.isDebugEnabled()) {
			_logger.debug("findSensitiveWords:result sensitiveWords=" + list.toString());
		}
		int totalCount = 0;
		int totalPage = 1;
		if (pageSize != 0) {
			try {
				totalCount = sensitiveWordMapper.selectSensitiveWordNum(beginTime, endTime);
			} catch (DataAccessException e) {
				_logger.error(ExceptionUtils.getStackTrace(e));
				throw new PwtzDatabaseException();
			}
			// BUGFIX: compute the page count AFTER fetching totalCount. The original
			// computed it first, while totalCount was still 0, so totalPage was always 0.
			totalPage = totalCount % pageSize == 0 ? totalCount / pageSize : totalCount / pageSize + 1;
		} else {
			// Unpaged mode: everything is page 1 and the count is just the list size.
			pageNo = 1;
			if (list != null && !list.isEmpty()) {
				totalCount = list.size();
			}
		}
		sensitiveWords.setTotalPage(totalPage);
		sensitiveWords.setCurrPage(pageNo);
		sensitiveWords.setPageSize(pageSize);
		sensitiveWords.setRecords(list);
		sensitiveWords.setTotalRecord(totalCount);
		return sensitiveWords;
	}

	/**
	 * Loads a single sensitive word by primary key.
	 *
	 * @param sensitiveWordId primary key
	 * @return the word, or null when not found
	 * @throws PwtzDatabaseException when the underlying select fails
	 */
	@Override
	public SensitiveWord load(Long sensitiveWordId)
			throws PwtzDatabaseException {
		if (_logger.isInfoEnabled()) {
			// Null-safe concatenation — avoids NPE on a null id.
			_logger.info("load:sensitiveWordId=" + sensitiveWordId);
		}
		SensitiveWord sensitiveWord = null;
		try {
			sensitiveWord = sensitiveWordMapper.selectByPrimaryKey(sensitiveWordId);
		} catch (DataAccessException e) {
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		return sensitiveWord;
	}

	/**
	 * Matches content against the pre-built pattern parser of each requested level.
	 *
	 * @param content the text to scan
	 * @param levels  sensitive-word levels to check; must be non-null and non-empty
	 * @return a map from level to the list of matched words for that level
	 * @throws InvalidParametersException when levels is null or empty
	 */
	public Map<Integer, List<String>> match(String content, int[] levels) throws InvalidParametersException {
		if (_logger.isDebugEnabled()) {
			// BUGFIX: Arrays.toString — "" + levels logged the array's identity hash.
			_logger.debug("match:levels=" + Arrays.toString(levels) + ", content=" + content);
		}
		if (levels == null || levels.length < 1) {
			_logger.error("parameter is wrong");
			throw new InvalidParametersException();
		}
		Map<Integer, List<String>> matchedWordMap = new HashMap<Integer, List<String>>();
		for (int level : levels) {
			// NOTE(review): assumes a parser exists for every requested level;
			// a level absent from PwtzConstants.mutiPatternParserMap still NPEs here,
			// matching the original behavior.
			Vector<String> matched = PwtzConstants.mutiPatternParserMap.get((short) level).parse(content,
					new Vector<Integer>());
			matchedWordMap.put(level, new ArrayList<String>(matched));
		}
		return matchedWordMap;
	}

	/**
	 * Returns true as soon as any requested level matches the content.
	 *
	 * @param content the text to scan
	 * @param levels  sensitive-word levels to check; must be non-null and non-empty
	 * @return true when at least one level produced a match, false otherwise
	 * @throws InvalidParametersException when levels is null or empty
	 */
	public boolean matchContent(String content, int[] levels) throws InvalidParametersException {
		if (_logger.isDebugEnabled()) {
			_logger.debug("match:levels=" + Arrays.toString(levels) + ", content=" + content);
		}
		if (levels == null || levels.length < 1) {
			_logger.error("parameter is wrong");
			throw new InvalidParametersException();
		}
		for (int level : levels) {
			// Dropped the unused matchedWordMap/wordsList allocations of the original;
			// a non-empty match list is all we need for the boolean answer.
			Vector<String> matched = PwtzConstants.mutiPatternParserMap.get((short) level).parse(content,
					new Vector<Integer>());
			if (!matched.isEmpty()) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Initializes the map from sensitive-word level to its multi-pattern filter engine
	 * by loading every level and its words from the database.
	 *
	 * @return a ConcurrentHashMap keyed by level, holding one parser per level
	 * @throws PwtzDatabaseException when any of the selects fails
	 */
	@Override
	public ConcurrentHashMap<Short, MutiPatternParser> initMutiPatternParserMap() throws PwtzDatabaseException {
		if (_logger.isDebugEnabled()) {
			_logger.debug("SensitiveWordServiceStubImpl initMutiPatternParserMap start...");
		}
		ConcurrentHashMap<Short, MutiPatternParser> mutiPatternParserMap = new ConcurrentHashMap<Short, MutiPatternParser>();
		try {
			// Load all distinct levels, then the word list per level.
			List<Short> levelList = sensitiveWordMapper.selectAllWordLevel();
			if (levelList != null) {
				for (Short level : levelList) {
					MutiPatternParser filterEngine = new MutiPatternParser();
					List<String> wordLists = sensitiveWordMapper.selectByLevel(level);
					if (wordLists != null) {
						for (String word : wordLists) {
							filterEngine.addFilterKeyWord(word, level);
						}
					}
					mutiPatternParserMap.put(level, filterEngine);
				}
			}
		} catch (DataAccessException e) {
			// CONSISTENCY: translate DataAccessException like every other method —
			// the original let it escape despite declaring throws PwtzDatabaseException.
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		return mutiPatternParserMap;
	}

	/**
	 * Returns the full list of sensitive words (no paging).
	 *
	 * @return the list as returned by the mapper; may be null
	 * @throws PwtzDatabaseException when the underlying select fails
	 */
	@Override
	public List<SensitiveWord> selectSensitiveWordList() throws PwtzDatabaseException {
		List<SensitiveWord> list = null;
		try {
			list = sensitiveWordMapper.selectSensitiveWordList();
		} catch (DataAccessException e) {
			_logger.error(ExceptionUtils.getStackTrace(e));
			throw new PwtzDatabaseException();
		}
		if (list != null && _logger.isDebugEnabled()) {
			_logger.debug("findSensitiveWords:result sensitiveWords=" + list.toString());
		}
		return list;
	}

}
