package yao.spider.jsoup.vobao;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.BlockingQueue;

import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.google.common.collect.Lists;
import com.google.common.collect.Range;

import yao.spider.jsoup.common.CommonConsTants;
import yao.spider.jsoup.vobao.bean.Question;

/**
 * Crawls paginated question-list pages for one category tag and puts every
 * discovered question URL onto a shared queue (the producer half of a
 * producer/consumer pipeline; consumers drain {@code urlQueue}).
 *
 * @author chenyao
 * @date 2017年2月28日 下午5:15:33
 */
public class PageUrlCrawler implements Runnable {

	// Queue shared with consumer threads; put() blocks when full (back-pressure).
	private final BlockingQueue<Question> urlQueue;
	// Inclusive range of list-page numbers to crawl (observed maximum: 168).
	private final Range<Integer> pageRange;
	// Category tag whose list pages are crawled; supplies URL key and display name.
	private final TagEnum tagEnum;

	/**
	 * @param urlQueue  queue shared with consumers; discovered questions are put here
	 * @param pageRange inclusive range of list-page numbers to crawl
	 * @param tagEnum   category tag whose list pages are crawled
	 */
	public PageUrlCrawler(BlockingQueue<Question> urlQueue, Range<Integer> pageRange, TagEnum tagEnum) {
		this.urlQueue = urlQueue;
		this.pageRange = pageRange;
		this.tagEnum = tagEnum;
	}

	@Override
	public void run() {
		for (String url : getPageUrls()) {
			try {
				crawl(url);
			} catch (InterruptedException e) {
				// Restore the interrupt flag AND stop: crawling the remaining
				// pages after an interrupt would fetch each page only to fail
				// again immediately on the next blocking put().
				Thread.currentThread().interrupt();
				return;
			} catch (RuntimeException e) {
				// One unreachable page should not kill the whole producer;
				// report it and continue with the next list page.
				System.err.println("crawl failed for " + url + ": " + e);
			}
		}
	}

	/**
	 * Fetches one list page and enqueues a {@link Question} for every link
	 * found under the {@code .evbname} elements.
	 *
	 * @param url absolute URL of the list page to fetch
	 * @throws InterruptedException if interrupted while blocking on the queue
	 * @throws RuntimeException     if the page cannot be fetched (wraps the IOException)
	 */
	private void crawl(String url) throws InterruptedException {
		Document document;
		try {
			document = Jsoup.connect(url)
					.header("Accept", "*/*")
					.header("Accept-Encoding", "gzip, deflate")
					.header("Accept-Language","zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3")
					.header("User-Agent","Mozilla/5.0 (Windows NT 6.1; WOW64; rv:48.0) Gecko/20100101 Firefox/48.0")
					.timeout(CommonConsTants.TIME_OUT_MS).get();
		} catch (IOException e) {
			// Preserve the cause in the wrapper instead of printStackTrace()
			// followed by a rethrow (which reported the same failure twice).
			throw new RuntimeException("failed to fetch " + url, e);
		}
		Elements as = document.select(".evbname a");
		for (Element a : as) {
			Question q = new Question();
			q.setUrl(this.getUrl(a));
			q.setCategory(tagEnum.getName());
			System.out.println("put:" + q.getUrl());
			urlQueue.put(q); // blocks when the queue is full
		}
	}

	/**
	 * Resolves a link element's relative {@code href} against the site domain.
	 *
	 * @param a anchor element, may be null
	 * @return absolute URL, or the empty string when the href is missing/blank
	 */
	private String getUrl(Element a) {
		if (a != null && StringUtils.isNotBlank(a.attr("href"))) {
			return Constants.URL_DOMAIN + a.attr("href");
		}
		return "";
	}

	/**
	 * Builds the list-page URLs for the configured tag over the page range
	 * (both endpoints inclusive).
	 *
	 * @return one URL per page number in {@code pageRange}
	 */
	private List<String> getPageUrls() {
		List<String> urls = Lists.newArrayList();
		for (int i = pageRange.lowerEndpoint(); i <= pageRange.upperEndpoint(); i++) {
			urls.add(Constants.URL_DOMAIN_PREFIX + this.tagEnum.getUrlKey() + "/list_wen_" + i + ".shtml");
		}
		return urls;
	}

}
