/**
 * 
 */
package com.qunar.crawler.utils;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.httpclient.Cookie;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.cookie.CookiePolicy;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author tao.zhang
 * 
 */
public class Crawler extends TimerTask implements Serializable {

	private static HttpClient hc = new HttpClient();
	private static Pattern compile = Pattern
			.compile("<meta.*?charset=(\")?(.*?)\".*?>");
	private static String originUrl = "http://t.qq.com/xuyw_bj";
	private static String moreUrl = "http://breadtrip.com/explore/new_live_more/?count=12&before=";
	private static int intervalDay = 2;

	private static final Logger logger = LoggerFactory.getLogger(Crawler.class);

	static {
		logger.info("爬虫启动…………");
		logger.info("初始化httpClient…………");
		/** ----------初始化 httpClient--------- */
		hc.getHttpConnectionManager().getParams()
				.setConnectionTimeout(100 * 120);
		hc.getHttpConnectionManager().getParams().setSoTimeout(1000 * 180);
		hc.getHostConfiguration().setHost("www.breadtrip.com");
		hc.getParams().setParameter("http.protocol.cookie-policy",
		        CookiePolicy.BROWSER_COMPATIBILITY);
		intervalDay = StringUtils.isBlank(ConfigUtils.getIntervalDay()) ? 2
				: Integer.valueOf(ConfigUtils.getIntervalDay());
		

	}

	@Override
	public void run() {

		// 根据图片地址中的URL中的时间来判断，每次抓取一天的数据，循环抓取，直到URL中的日期不再小于前天的日期为止。
		// 经分析对方界面，每次下拉取数据时要发送一个before参数，before是上次请求返回的一个时间参数，data-before

		// 存放html
		StringBuilder page = new StringBuilder();
		// bread
		List<Bread> breads = new ArrayList<Bread>();
		// 下次取数据开始时间
		StringBuilder dataBefore = new StringBuilder();

		Calendar c = Calendar.getInstance();

		c.add(Calendar.DATE, -intervalDay);

		// 首次抓取
		page.append(loopGet(originUrl));

		// 从html中提取需要的信息，放在list中
//		HtmlPossesserBread.digSubNodes(page.toString(), breads);
		JsoupPossesserWeather.digInfo(page, breads);
		// 从html中提取data-before
		dataBefore
				.append(HtmlPossesserBread.digFirstBeforeDate(page.toString()));
		// 前天的日期
		String theDayBeforeYesteday = new SimpleDateFormat("yyyy/MM/dd")
				.format(c.getTime());
		if (CollectionUtils.isNotEmpty(breads)) {

			// 当list中最后一个图片URL中的data小于前天的日期，持续抓取
			while (breads.get(breads.size() - 1).getDate()
					.compareTo(theDayBeforeYesteday) > 0) {
				page.setLength(0);
				System.out.println("本次抓取发送时间（before）======"
						+ dataBefore.toString());
				page.append(loopGet(moreUrl + dataBefore.toString()));

				if (StringUtils.isNotBlank(page.toString())) {

					breads.clear();
					HtmlPossesserBread.digSubNodes(page.toString(), breads);
					dataBefore.setLength(0);
					dataBefore.append(HtmlPossesserBread.digBeforeDate(page
							.toString()));
					System.out.println("本次抓取得到时间（before）======"
							+ dataBefore.toString());
				}

				logger.info("此次共爬到(count):" + breads.size() + "个图片！"
						+ Calendar.getInstance().getTime().toString());

				/** ---添加到数据库----- */
				HtmlPossesserBread.addPic2DB(breads);
			}
			c.add(Calendar.DATE, 1);
			String yesteday = new SimpleDateFormat("yyyy/MM/dd").format(c
					.getTime());
			logger.info("----------" + intervalDay + " 天图片抓取结束");

		}
		/** ----url内存去重---- */
		// if (CollectionUtils.isNotEmpty(urlList)) {
		// Set set = new HashSet(urlList);
		// urlList.clear();
		// urlList.addAll(set);
		// }
		// for (String url : urlList) {
		// logger.info(url);
		// }
	}

	private static String get(String url) throws HttpException, IOException {

		try {
			// 防止频繁抓取
			Thread.sleep(1000 * RandomUtils.nextInt(3));
		} catch (InterruptedException e1) {
			e1.printStackTrace();
		}
		GetMethod httpGet = null;
		String page = null;

		//ProxySetting ps = ConfigUtils.getRandomProxySetting();
		try {

			/** ----------初始化Get---------------- */
			httpGet = new GetMethod(url);
			httpGet.setRequestHeader("Accept",
					"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
			httpGet.setRequestHeader("Accept-Language", "zh-cn");
			httpGet.setRequestHeader("Cache-Control", "max-age=0");
			httpGet.setRequestHeader("Connection", " Keep-Alive");
			httpGet.setRequestHeader("Referer","http://t.qq.com/xuyw_bj");
			httpGet.setRequestHeader("User-Agent", ConfigUtils.getRandomAgent());

			/** -------------发起http请求----------------- */
			logger.info("/**-------------发起http请求-----------------*/");
			Cookie[] cookies = hc.getState().getCookies();
			if (ArrayUtils.isNotEmpty(cookies)) {
				hc.getState().addCookies(cookies);
			}
//
//			logger.info("current proxy:" + ps.getIp() + " prot: "
//					+ ps.getProt());
//			// 改变代理
//			hc.getHostConfiguration().setProxy(ps.getIp(), ps.getProt());
			int statusCode;
			statusCode = hc.executeMethod(httpGet);
			logger.info("请求状态：" + statusCode);

			// while (statusCode != 200) {
			// ps = ConfigUtils.getRandomProxySetting();
			// hc.getHostConfiguration().setProxy(ps.getIp(), ps.getProt());
			// logger.info("change to new proxy:" + ps.getIp() + " prot: "
			// + ps.getProt());
			// statusCode = hc.executeMethod(httpGet);
			// logger.info("请求状态：" + statusCode);
			// }
			logger.info("/**-------------http请求结束-----------------*/");

			if (statusCode == HttpStatus.SC_OK) {
				InputStream is = new BufferedInputStream(
						httpGet.getResponseBodyAsStream());

				List<Byte> list = new ArrayList<Byte>(147216);
				int read;
				while ((read = is.read()) != -1) {
					list.add((byte) read);
				}

				byte[] bs = new byte[list.size()];
				for (int i = 0; i < list.size(); i++)
					bs[i] = list.get(i).byteValue();

				String charset = "utf-8";
				page = new String(bs, charset);
				Matcher matcher = compile.matcher(page);
				while (matcher.find()) {
					charset = matcher.group(2);
				}
				if (!charset.equals("utf-8"))
					page = new String(bs, charset);

			}else{
				logger.error("http status is not 200");
				throw new HttpException();
			}

		} finally {
			if (null != httpGet)
				httpGet.abort();
		}
		return page;
	}

	// 循环访问，避免出现网络请求超时或者，读取超时异常时程序中断
	private String loopGet(String url) {

		String html = null;
		while (StringUtils.isBlank(html)) {
			try {
				html = get(url);
			} catch (HttpException e) {
				System.out.println("httpGet连接异常");
				ProxySetting ps = ConfigUtils.getRandomProxySetting();
				// 改变代理
				hc.getHostConfiguration().setProxy(ps.getIp(), ps.getProt());
			} catch (IOException e) {
				System.out.println("httpGetIO异常");
				ProxySetting ps = ConfigUtils.getRandomProxySetting();
				// 改变代理
				hc.getHostConfiguration().setProxy(ps.getIp(), ps.getProt());
			}
		}

		return html;
	}

}
