package org.nerve.sprider.crawler4j;

import org.nerve.sprider.worker.ExportToFileWorker;
import org.nerve.sprider.worker.Worker;

import java.util.Arrays;

/**
 * org.nerve.sprider.crawler4j
 * Created by zengxm on 2016/3/17 0017.
 */
public class Starter {
	/**
	 * Command-line entry point.
	 *
	 * <p>If the first argument is {@code "export"} (case-insensitive) the export
	 * worker runs; any other non-empty argument list is treated as crawl
	 * parameters and handed to {@link #crawler(String[])}. With no arguments a
	 * usage message is printed.
	 *
	 * @param args command-line arguments; see the usage message for the format
	 * @throws Exception propagated from the selected worker
	 */
	public static void main(String[] args) throws Exception {
		System.out.println(Arrays.toString(args));
		if (args.length >= 1) {
			if (args[0].equalsIgnoreCase("export")) {
				export();
			} else {
				crawler(args);
			}
		} else {
			printUsage();
		}
	}

	/**
	 * Runs the export worker, dumping previously-crawled data to a file.
	 *
	 * @throws Exception propagated from {@link ExportToFileWorker#doWork}
	 */
	public static void export() throws Exception {
		Worker worker = new ExportToFileWorker();
		// ExportToFileWorker takes no input payload.
		worker.doWork(null);
	}

	/**
	 * Configures and starts a crawl from the command-line arguments.
	 *
	 * <p>Expected layout: {@code args[0]} = use proxy ("true"/"false"),
	 * {@code args[1]} = crawl depth, {@code args[2]} = crawler type
	 * ("onion" selects {@link OnionCrawler}), {@code args[3..]} = seed URLs.
	 *
	 * @param args raw command-line arguments (at least 3 required)
	 * @throws Exception propagated from the crawl worker
	 */
	public static void crawler(String[] args) throws Exception {
		// Guard against missing arguments: indexing args[0..2] below would
		// otherwise throw ArrayIndexOutOfBoundsException.
		if (args.length < 3) {
			printUsage();
			return;
		}

		CrawlWorker worker = new CrawlWorker();
		worker.setUseProxy(Boolean.parseBoolean(args[0]));
		worker.setDeep(Integer.parseInt(args[1]));
		if ("onion".equalsIgnoreCase(args[2])) {
			worker.setCrawlerCls(OnionCrawler.class);
		}

		// Everything after the first three arguments is a seed URL.
		String[] seeds = Arrays.copyOfRange(args, 3, args.length);
		worker.addSeeds(seeds);
		System.out.println(Arrays.toString(seeds));

		System.out.println("use proxy:" + worker.isUseProxy());
		System.out.println("crawler deep:" + worker.getDeep());
		System.out.println("crawler class:" + worker.getCrawlerCls().getName());
		System.out.println("urls:");
		for (String s : worker.getSeeds()) {
			System.out.println("    " + s);
		}

		worker.start();
	}

	/** Prints the expected argument layout (in Chinese, matching the original tool). */
	private static void printUsage() {
		System.out.println("请输入参数：{是否使用代理，true or false} {采集级数} {onion} {采集url，可以多个}");
	}
}
