package com.sentiment.servlet;

import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.bson.Document;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.sentiment.config.Config;
import com.sentiment.config.ConfigDb;
import com.sentiment.crawler.BaiduNewsCrawler;
import com.sentiment.database.bean.CrawlInfo;
import com.sentiment.database.bean.CrawlNews;
import com.sentiment.database.bean.Keyword;
import com.sentiment.database.bean.UserInfo;
import com.sentiment.database.codec.CrawlNewsCodec;
import com.sentiment.database.dbUtil.DbCodecDriver;
import com.sentiment.database.dbUtil.DbUtil;
import com.sentiment.dbutil.UtilDao;
import com.sentiment.report.DataStatistics;
import com.sentiment.shiro.ActiveUser;

@SuppressWarnings("serial")
public class HotInfoSupervisionServlet extends HttpServlet {

	private static final Logger LOG = LoggerFactory.getLogger(HotInfoSupervisionServlet.class);

	/**
	 * Dispatches the request to the handler named by the "method" parameter.
	 *
	 * SECURITY: the original implementation reflectively looked up and invoked
	 * whatever public method name the client supplied, which allowed a caller to
	 * invoke ANY public method of this servlet (unsafe reflection). Dispatch is
	 * now restricted to the single known handler.
	 *
	 * @param request  must carry a "method" parameter naming the handler
	 * @param response receives the handler output, or 400 for unknown methods
	 */
	@Override
	protected void doGet(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		String methodName = request.getParameter("method");
		if ("newsInfo".equals(methodName)) {
			newsInfo(request, response);
		} else {
			// Unknown/missing method: reject explicitly instead of swallowing a
			// reflection exception and returning an empty 200 response.
			LOG.warn("Rejected dispatch to unknown method: {}", methodName);
			response.sendError(HttpServletResponse.SC_BAD_REQUEST);
		}
	}

	/** POST is handled identically to GET. */
	@Override
	protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		doGet(req, resp);
	}

	/** Leftover local-test entry point; not used by the servlet container. */
	public static void main(String[] args) {
		new HotInfoSupervisionServlet();
	}

	/**
	 * Crawls Baidu News for the keyword passed in the "wd" request parameter,
	 * stores the crawled articles in the sentiment database, and writes a JSON
	 * payload of the form
	 * {"newslist":[...],"media_ratio":[...],"reprint_comment_ratio":[...]}
	 * back to the client as UTF-8.
	 *
	 * @param request  must carry a non-empty, URL-encoded "wd" keyword parameter
	 * @param response receives the JSON result; nothing is written if the
	 *                 keyword is missing or empty
	 * @throws IOException if writing the response fails
	 */
	public void newsInfo(HttpServletRequest request, HttpServletResponse response) throws IOException {
		String keyword = request.getParameter("wd");
		// Validate BEFORE decoding: URLDecoder.decode(null, ...) throws NPE, and
		// the original `keyword == ""` identity comparison never matched anyway.
		if (keyword == null || keyword.isEmpty()) {
			return;
		}
		keyword = URLDecoder.decode(keyword, "UTF-8");
		LOG.info("传入的关键词参数为 ： {}", keyword);

		// Remember the search keyword for the logged-in user, if any.
		Subject subject = SecurityUtils.getSubject();
		ActiveUser activeUser = (ActiveUser) subject.getPrincipal();
		if (activeUser != null) {
			new UtilDao().saveKeyword2user(activeUser.getUser_id(), keyword);
		}

		// Crawl up to 3 pages of Baidu News results for the keyword.
		BaiduNewsCrawler bnc = new BaiduNewsCrawler();
		bnc.getSearch(keyword, 3);
		List<CrawlNews> list = bnc.getNewsList();

		// Persist the crawled articles, skipping entries that have no body text.
		// Close the connection even if an insert throws (the original leaked it).
		DbCodecDriver<CrawlNews> driver = new DbCodecDriver<CrawlNews>();
		driver.openConnection(ConfigDb.sentimentDb, CrawlNews.class, CrawlNewsCodec.class);
		try {
			if (list != null) {
				for (CrawlNews n : list) {
					if (n.getText() != null) {
						driver.insert(n);
					}
				}
			}
		} finally {
			driver.closeConnection();
		}

		// Build the JSON response. The original omitted the opening '{' when the
		// crawl returned null and deleted the '[' when it returned an empty list,
		// producing malformed JSON in both cases; always emit a complete object.
		StringBuilder sbd = new StringBuilder();
		sbd.append("{\"newslist\":[");
		if (list != null) {
			for (int i = 0; i < list.size(); i++) {
				if (i > 0) {
					sbd.append(',');
				}
				sbd.append(list.get(i).toJson());
			}
		}
		sbd.append("]");
		// NOTE(review): per the original comments these two graphs are temporary
		// placeholder data generated server-side — confirm before relying on them.
		List<JSONObject> mediaRatio = DataStatistics.mediaGraph(list);
		sbd.append(",\"media_ratio\":").append(mediaRatio);
		List<JSONObject> reprintCommentRatio = DataStatistics.reprintCommentGraph(list);
		sbd.append(",\"reprint_comment_ratio\":").append(reprintCommentRatio);
		sbd.append("}");
		LOG.info(sbd.toString());

		response.setContentType("application/json");
		response.setCharacterEncoding("utf-8");
		response.getWriter().print(sbd.toString());
	}

}
