package com.clz.hyperit.filter;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import net.sf.json.JSONObject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class WordsFilter implements Filter {
	private final Logger logger = LoggerFactory.getLogger(WordsFilter.class);
	private List<String> sensitiveWord = new ArrayList<String>();

	public void destroy() {
		// TODO Auto-generated method stub

	}

	@SuppressWarnings("unused")
	public void doFilter(ServletRequest request, ServletResponse response,
			FilterChain chain) throws IOException, ServletException {
		// TODO Auto-generated method stub
		HttpServletRequest httpRequest = (HttpServletRequest) request;
		HttpServletResponse httpResponse = (HttpServletResponse) response;
		List<String> title_keywordlist = new ArrayList<String>();
		List<String> content_md_keywordlist = new ArrayList<String>();
		Map<String, String> keywordmap = new HashMap<String, String>();
		Map<String, String[]> parameterMap = request.getParameterMap();

		int flag = 0;
		int i = 0;
		for (Entry<String, String[]> entry : parameterMap.entrySet()) {

			if (entry.getKey().equals("title")) {
				String value = entry.getValue()[0].trim();
				title_keywordlist = digui(value, sensitiveWord,
						title_keywordlist, value.length());
				removeDuplicate(title_keywordlist);
				keywordmap.put(entry.getKey(), title_keywordlist.toString());
			}
			// --------------------分隔符------------------------------
			if (entry.getKey().equals("content_md")) {
				String value = entry.getValue()[0].trim();
				content_md_keywordlist = digui(value, sensitiveWord,
						content_md_keywordlist, value.length());
				removeDuplicate(content_md_keywordlist);
				keywordmap.put(entry.getKey(),
						content_md_keywordlist.toString());
			}
			// --------------------分隔符------------------------------
		}
		// String[] value = entry.getValue();
		// if(value.length == 1 )
		// for (String word : sensitiveWord) {
		// // Pattern pattern = Pattern.compile(word); //
		// // 编译regex这个正则表达式，得到代表此正则表达式的对象
		// // Matcher m = pattern.matcher(value[0]);
		// // if (m.find()) { // 匹配器的find方法若返回true，则客户机提交的数据里面有和正则表达式相匹配的内容
		// // logger.info(entry.getKey() + " : " + entry.getValue()[0]
		// // + " : 属于敏感词类");
		// //
		// // }
		// if (word.length() >= 2) {
		// if (value[0].toUpperCase().indexOf(word.toUpperCase()) != -1) {
		// logger.info(entry.getKey() + " : "
		// + entry.getValue()[0] + " : 含有敏感词类----》" + word);
		// keywordlist.add(word);
		// }
		// }
		// }
		if (title_keywordlist.size()>0||content_md_keywordlist.size()>0) {
			request.setAttribute("content_md_keywordlist",
					content_md_keywordlist);
			request.setAttribute("title_keywordlist", title_keywordlist);
			request.setAttribute("wordmap", keywordmap);
			responseOutWithJson(httpResponse, keywordmap);
			return;
		}
		chain.doFilter(request, response);

	}

	public void init(FilterConfig filterConfig) throws ServletException {
		// TODO Auto-generated method stub
		try {
			String path = WordsFilter.class.getClassLoader()
					.getResource("sensitive").getPath();
			File[] files = new File(path).listFiles();
			for (File file : files) {
				if (!file.getName().endsWith(".txt")) {
					continue;
				}
				BufferedReader br = new BufferedReader(new FileReader(file));
				String line = null;
				while ((line = br.readLine()) != null) {
					// String[] s = line.split("\\|");
					sensitiveWord.add(line);
				}
			}
			System.out.println("敏感词库加载完成......"); // 作断点调试用
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	// json转换
	protected void responseOutWithJson(HttpServletResponse response,
			Object responseObject) {
		// 将实体对象转换为JSON Object转换
		JSONObject responseJSONObject = JSONObject.fromObject(responseObject);
		// String str = responseJSONObject.toString();
		// String[] str = responseJSONObject.toString().split("{","}");
		// JSONArray responseJSONObject = JSONArray.fromObject(responseObject);
		response.setCharacterEncoding("UTF-8");
		response.setContentType("application/json; charset=utf-8");
		PrintWriter out = null;
		System.out.println(responseJSONObject.toString());
		try {
			out = response.getWriter();
			out.write(responseJSONObject.toString());
			logger.debug("对应的敏感字词是\n");
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (out != null) {
				out.close();
			}
		}
	}

	// 递归
	private List<String> digui(String value, List<String> sensitiveWord,
			List<String> title_keywordlist, int length) {
		int flag = 0;
		int len = value.length() - 1;
		for (String word : sensitiveWord) {
			if (word.length() >= 2 && flag < value.length()
					&& value.length() >= 2) {
				// 进行比较
				flag = value.toUpperCase().indexOf(word.toUpperCase());
				// 如果value里面包含了word
				if (flag != -1) {
					// value去掉查到的敏感字
					value = value.substring(0, flag)
							+ value.substring(flag + word.length(),
									value.length());
					// flag = flag+word.length()+1;
					logger.info("title : " + value + " : 含有敏感词类----》" + word);
					title_keywordlist.add(word);
					title_keywordlist = digui(value, sensitiveWord,
							title_keywordlist, value.length());
				} else if (len <= 0)
					break;
			}
		}
		return title_keywordlist;
	}

	// 删除重复数据
	public static void removeDuplicate(List<String> list) {
		for (int i = 0; i < list.size() - 1; i++) {
			for (int j = list.size() - 1; j > i; j--) {
				if (list.get(j).equals(list.get(i))) {
					list.remove(j);
				}
			}
		}
		System.out.println(list);
	}
}
