package com.util.timerTask;

import java.io.UnsupportedEncodingException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;

import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;

import com.Entity.KnowledgeInfo;
import com.mysql.jdbc.Connection;
import com.mysql.jdbc.PreparedStatement;
import com.util.textProcess.ConnectionPool;
import com.util.textProcess.NLPIR;
import com.util.textProcess.NLPIR.CLibrary;

/**
 * Breadth-first crawler for Sohu health articles (http://health.sohu.com).
 * For every article page it extracts title/abstract/body/publish time,
 * deduplicates by NLPIR text fingerprint, and inserts new articles into the
 * {@code knowledgeInfo} MySQL table.
 */
public class CollectInfo extends BreadthCrawler {

	// Category id stamped on articles of the current per-category pass;
	// updated by startCrawl() before each category is crawled.
	private static short categoryid = 1;

	// Number of articles newly stored during one startCrawl() run.
	private static int k = 0;

	static ConnectionPool pool = null;
	static Connection conn;
	static PreparedStatement stmt;
	private static CLibrary nLPIR = NLPIR.getInstance();

	// Reused holder for the article currently being processed; access is
	// serialized by the synchronized block in visit().
	KnowledgeInfo knowledgeinfo = new KnowledgeInfo();

	/**
	 * Creates a crawler whose state is persisted under {@code crawlPath}.
	 *
	 * @param crawlPath directory for the crawler's Berkeley DB state
	 * @param autoParse whether WebCollector auto-extracts links from pages
	 */
	public CollectInfo(String crawlPath, boolean autoParse) {
		super(crawlPath, autoParse);
	}

	/**
	 * Callback invoked by WebCollector for every fetched page. Only article
	 * detail pages (http://health.sohu.com/NNNNNNNN/xxx.shtml) are handled:
	 * the article is extracted, deduplicated by fingerprint, and inserted
	 * when new.
	 */
	@Override
	public void visit(Page page, CrawlDatums next) {
		// Serialize the whole pipeline: knowledgeinfo and the shared JDBC
		// statement are touched by multiple worker threads of this crawler.
		synchronized (this) {
			// NOTE(review): dot before "shtml" is now escaped — the original
			// ".*.shtml" also matched URLs ending in e.g. "xshtml".
			if (!page.getUrl().matches(
					"http://health.sohu.com/\\d{8}\\/.*\\.shtml")) {
				return;
			}
			extractArticle(page);
			try {
				if (articleExists()) {
					LOG.info("文章已存在！");
				} else {
					saveArticle();
				}
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}
	}

	// Copies all article fields of the given page into knowledgeinfo.
	private void extractArticle(Page page) {
		knowledgeinfo.setCategoryId(categoryid);
		knowledgeinfo.setInfoTitle(page.select("h1").text());
		knowledgeinfo.setInfoUrl(page.getUrl());
		knowledgeinfo.setInfoAbstarct(page.select(
				"meta[name=description]").attr("content"));
		knowledgeinfo.setInfoFetchTime(new Timestamp(System
				.currentTimeMillis()));
		knowledgeinfo.setContentBody(page.select("div[id=contentText]")
				.first().text());
		knowledgeinfo.setInfoTime(page.select("span[id=pubtime_baidu]")
				.first().text());
		knowledgeinfo.setFingerPrint(nLPIR
				.NLPIR_FingerPrint(knowledgeinfo.getContentBody()));
	}

	/**
	 * Returns whether an article with the current fingerprint is already
	 * stored. Uses a parameterized query — the original concatenated the
	 * fingerprint into the SQL string and then (illegally, per the JDBC
	 * spec) passed that string to PreparedStatement.executeQuery(String).
	 */
	private boolean articleExists() throws SQLException {
		String sql = "select infoId from knowledgeInfo where fingerPrint=?";
		stmt = (PreparedStatement) conn.prepareStatement(sql);
		ResultSet rs = null;
		try {
			stmt.setLong(1, knowledgeinfo.getFingerPrint());
			rs = stmt.executeQuery();
			return rs.next();
		} finally {
			// Close even on error — the original leaked rs on exception.
			if (rs != null) {
				rs.close();
			}
			stmt.close();
		}
	}

	/**
	 * Loads the stop-word blacklist, extracts keywords via NLPIR, and
	 * inserts the article held in knowledgeinfo into the database.
	 */
	private void saveArticle() throws SQLException {
		// Import the stop-word list so keyword extraction ignores them.
		int addStopWordFlag = nLPIR
				.NLPIR_ImportKeyBlackList(NLPIR.stopWordPath);
		if (addStopWordFlag == 0) {
			LOG.info("去停用词失败！");
		} else {
			LOG.info("去停用词成功！");
		}
		knowledgeinfo.setNlpirKeyWords(nLPIR.NLPIR_GetKeyWords(
				knowledgeinfo.getContentBody(), 10, false));

		String sql = "insert into knowledgeInfo(categoryId,infoUrl,infoTitle,infoAbstarct,infoFetchTime,fingerPrint,nlpirKeyWords,contentBody,infoTime) values(?,?,?,?,?,?,?,?,?)";
		stmt = (PreparedStatement) conn.prepareStatement(sql);
		try {
			stmt.setInt(1, knowledgeinfo.getCategoryId());
			stmt.setString(2, knowledgeinfo.getInfoUrl());
			stmt.setString(3, knowledgeinfo.getInfoTitle());
			stmt.setString(4, knowledgeinfo.getInfoAbstarct());
			stmt.setTimestamp(5,
					(Timestamp) knowledgeinfo.getInfoFetchTime());
			stmt.setLong(6, knowledgeinfo.getFingerPrint());
			stmt.setString(7, knowledgeinfo.getNlpirKeyWords());
			stmt.setString(8, knowledgeinfo.getContentBody());
			stmt.setString(9, knowledgeinfo.getInfoTime());
			if (stmt.executeUpdate() > 0) {
				k++;
				LOG.info("文章保存成功！");
			} else {
				LOG.info("文章保存失败！");
			}
		} finally {
			stmt.close();
		}
	}

	/**
	 * Entry point: initializes NLPIR, obtains a pooled connection, then
	 * crawls every Sohu health category in turn, logging the total number
	 * of newly stored articles.
	 *
	 * @throws Exception propagated from the underlying crawler runs
	 */
	public static void startCrawl() throws Exception {
		int init_flag;
		try {
			init_flag = NLPIR.init_NLPIR();
			if (init_flag == 0) {
				LOG.info("NLPIR初始化失败！");
			} else {
				LOG.info("NLPIR初始化成功！");

				pool = ConnectionPool.getInstance();
				// NOTE(review): the connection is never returned to the
				// pool after the run — confirm ConnectionPool's contract.
				conn = (Connection) pool.getConnection();

				/****************************** seed categories *****************************/
				String[] categories = new String[] { "neike", "waike",
						"gyneoopathy", "child", "cancer", "shipin",
						"erbihouke", "yanke1", "kouqiangke", "zhongyi",
						"tijian", "guke", "nanke", "pifuke", "xinlike", "paobu" };
				for (int i = 0; i < categories.length; i++) {
					// Category ids are 1-based, in array order.
					categoryid = (short) (i + 1);
					try {
						startCrawlThread(categories[i]);
					} catch (Exception e) {
						// Best-effort: a failed category must not abort the
						// remaining ones.
						e.printStackTrace();
					}
				}

				LOG.info("本次共新增" + k + "篇文章！");
			}
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		}

	}

	/**
	 * Runs one breadth-first crawl (depth 2) seeded from the given
	 * category's index page. Regexes starting with "-" are WebCollector
	 * exclusion rules (images and fragment URLs are skipped).
	 */
	private static void startCrawlThread(String category) throws Exception {
		CollectInfo crawler = new CollectInfo("depth_crawler", true);
		String url = "http://health.sohu.com/" + category + "/index.shtml";
		crawler.addSeed(url);
		// Escaped dot: only genuine ".shtml" article pages are followed.
		crawler.addRegex("http://health.sohu.com/\\d{8}\\/.*\\.shtml");
		crawler.addRegex("-.*\\.(jpg|png|gif).*");
		crawler.addRegex("-.*#.*");
		crawler.setRetry(5);
		crawler.setMaxRetry(10);
		crawler.setTopN(100);
		crawler.setThreads(20);
		crawler.start(2);
	}

}
