package com.bmth.alise.client;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.bmth.alise.client.config.Configuration;
import com.bmth.alise.client.utils.ElasticsearchTemplate;
import com.bmth.alise.client.utils.RedisUtils;

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.scheduler.RedisScheduler;
import us.codecraft.webmagic.utils.UrlUtils;

public class TestPageProcessor implements PageProcessor {
	private Logger logger = LoggerFactory.getLogger(getClass());
	private final String PARENT_URL = "parent_url";
	private Site site;

	public TestPageProcessor(Site site) {
		this.site = site;
	}

	@Override
	public void process(Page page) {
		logger.debug("url[" + page.getRequest().getUrl() + "]" + "  code [" + page.getStatusCode() + "]");
		try {
			String url = page.getRequest().getUrl();
			List<String> all = page.getHtml().links().regex("(" + UrlRegexMapping.getIncludeRegex(url) + ")").all();

			for (String outUrl : all) {
				if (!outUrl.startsWith("#")) {
					if (UrlRegexMapping.filter(outUrl)) {
						outUrl = UrlUtils.canonicalizeUrl(outUrl, url.toString());
						if (outUrl.endsWith("#")) {
							outUrl = UrlUtils.canonicalizeUrl(outUrl, url.toString());
						}
						Request request = new Request(outUrl);
						request.putExtra(PARENT_URL, url);
						page.addTargetRequest(request);
					}
				}
			}

			String parentUrl = (String) page.getRequest().getExtra(PARENT_URL);
			String html = page.getHtml().toString();
			String digest = DigestUtils.shaHex(url);
			Map<String, Object> m = new HashMap<>();
			m.put("parent_url", parentUrl);
			m.put("digest", digest);
			m.put("status_code", page.getStatusCode());
			m.put("url", url);
			m.put("html", html);
			m.put("index_date", new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()));
			m.put("host", UrlUtils.getDomain(url));
			String jsonString = JSON.toJSONString(m);

			ElasticsearchTemplate.addDoc(digest, jsonString);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	@Override
	public Site getSite() {
		return site;
	}

	public static void main(String[] args) {
		try {
			// 配置文件
			Configuration.init();
			UrlRegexMapping.init();
			Integer RetryTimes = Integer.parseInt(Configuration.getProperty("RetryTimes"));
			Integer SleepTime = Integer.parseInt(Configuration.getProperty("SleepTime"));
			Integer TimeOut = Integer.parseInt(Configuration.getProperty("TimeOut"));
			Integer ThreadNumber = Integer.parseInt(Configuration.getProperty("ThreadNumber"));

			Site site = Site.me().setRetryTimes(RetryTimes).setSleepTime(SleepTime).setTimeOut(TimeOut);

			// 读取种子文件，过滤规则
			InputStream ins = Thread.currentThread().getContextClassLoader().getResourceAsStream("seeds.txt");

			RedisScheduler scheduler = new RedisScheduler(RedisUtils.initPool());

			@SuppressWarnings("unchecked")
			List<String> urls = IOUtils.readLines(ins);
			String[] urlArr = urls.toArray(new String[urls.size()]);

			Spider.create(new TestPageProcessor(site)).scheduler(scheduler).setUUID("search_it").thread(ThreadNumber)
					.addUrl(urlArr).run();
			
			// Spider.create(new TestPageProcessor(site,
			// AcceptRegex)).setUUID("search_it").thread(ThreadNumber)
			// .addUrl("http://www.itpub.net/thread-1821084-1-1.html").run();

		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}