package org.nerve.study.sprider.crawler4j;

import java.util.Arrays;

/**
 * org.nerve.study.sprider.crawler4j
 * Created by zengxm on 2016/3/17 0017.
 */
/**
 * Command-line entry point for launching a {@code CrawlWorker}.
 *
 * <p>Expected arguments: {@code <useProxy: true|false> <depth> <seedUrl> [seedUrl...]}.
 * At least one seed URL is required; otherwise a usage message is printed.
 */
public class Starter {
	public static void main(String[] args) throws Exception {
		// Need proxy flag, depth, and at least one seed URL.
		// (The old check `args.length > 1` let the crawler start with zero seeds.)
		if(args.length >= 3){
			// Everything after the first two arguments is a seed URL.
			String[] seeds = Arrays.copyOfRange(args, 2, args.length);

			CrawlWorker worker = new CrawlWorker();
			// parseBoolean avoids the needless boxing of Boolean.valueOf
			worker.setUseProxy(Boolean.parseBoolean(args[0]));
			worker.setDeep(Integer.parseInt(args[1]));
			worker.addSeeds(seeds);

			System.out.println(Arrays.toString(seeds));

			// Echo the effective configuration before starting the crawl.
			System.out.println("use proxy:"+worker.isUseProxy());
			System.out.println("crawler deep:"+worker.getDeep());
			System.out.println("urls:");
			for(String s:worker.getSeeds())
				System.out.println("    "+s);

			worker.start();
		}else{
			System.out.println("请输入参数：{是否使用代理，true or false} {采集级数} {采集url，可以多个}");
		}
	}
}
