package com.ihunanren.common.spider;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Strings.isNullOrEmpty;

import java.util.Arrays;
import java.util.Set;

/**
 * @author kalman03
 */
/**
 * Configuration parameters for a web-spider run: the entry URL, worker
 * thread count, crawl depth, business listeners, and optional regex
 * filters that decide which links are followed.
 *
 * <p>Call {@link #check()} before starting a crawl to validate the
 * configured values.
 */
public class WebSpiderParam {
	/** Entry page URL where crawling starts. */
	private String baseUrl;
	/** Number of worker threads; defaults to 2, capped at 10 by {@link #check()}. */
	private int thread = 2;
	/** Crawl depth; defaults to 2. */
	private int deep = 2;
	/** Business callbacks invoked during the crawl. */
	private Set<WebSpiderListener> listeners;
	/**
	 * Regex patterns a page link must match to be crawled; null/empty means
	 * every link found on a page is followed for deep crawling.
	 */
	private String[] linkRegex;

	/**
	 * Validates this parameter object before a crawl starts.
	 *
	 * @throws IllegalArgumentException if {@code baseUrl} is null or empty,
	 *         {@code thread} is not in the range [1, 10], or {@code deep}
	 *         is less than 1
	 */
	public void check() {
		checkArgument(!isNullOrEmpty(baseUrl), "baseUrl must not be null or empty");
		// Enforce the lower bound too: the previous check only rejected
		// thread > 10, silently accepting a zero or negative thread count.
		checkArgument(thread >= 1 && thread <= 10, "thread must be in [1, 10] but was %s", thread);
		checkArgument(deep >= 1, "deep must be >= 1 but was %s", deep);
	}

	public String getBaseUrl() {
		return baseUrl;
	}

	public void setBaseUrl(String baseUrl) {
		this.baseUrl = baseUrl;
	}

	public int getThread() {
		return thread;
	}

	public void setThread(int thread) {
		this.thread = thread;
	}

	public int getDeep() {
		return deep;
	}

	public void setDeep(int deep) {
		this.deep = deep;
	}

	public Set<WebSpiderListener> getListeners() {
		return listeners;
	}

	public void setListeners(Set<WebSpiderListener> listeners) {
		this.listeners = listeners;
	}

	public String[] getLinkRegex() {
		return linkRegex;
	}

	public void setLinkRegex(String[] linkRegex) {
		this.linkRegex = linkRegex;
	}

	@Override
	public String toString() {
		return "WebSpiderParam [baseUrl=" + baseUrl + ", thread=" + thread
				+ ", deep=" + deep + ", listeners=" + listeners
				+ ", linkRegex=" + Arrays.toString(linkRegex) + "]";
	}
}
