package edu.hit.crawler;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

/**
 * The main entry-point class of ICrawler. Injects the initial seeds into the
 * crawl database (unless restoring a previous run) and then drives the
 * select / download / parse / update sequence for the requested number of
 * crawl levels.
 */
@SuppressWarnings("deprecation")
public class Crawler extends Configured implements Tool {

	public static Logger logger = Logger.getLogger(Crawler.class);

	public static void main(String[] args) throws Exception {
		int res = ToolRunner.run(new Configuration(), new Crawler(), args);
		System.exit(res);
	}

	/**
	 * Launch the crawl sequence using the given arguments.
	 *
	 * @param args args[0] = seed file, args[1] = number of crawl levels,
	 *             args[2] = custom configuration file
	 * @return 0 on success, otherwise the exit code of the first failing driver
	 * @throws Exception if a driver or HDFS operation fails
	 */
	@Override
	public int run(String[] args) throws Exception {

		if (args.length < 3) {
			System.err.println(
					"Usage: Crawler <seed file> <crawl levels> <config file>");
			return 1;
		}

		getConf().addResource(new Path(args[2]));
		getConf().set("org.work.crawler.seeds", args[0]);

		String workdir = getConf().get("org.work.crawler.dir");
		System.out.println(workdir);

		// When restoring from a previous run, keep the existing crawl_db and
		// skip seed injection entirely. (Previously this flag was clobbered by
		// a hard-coded "isRestore = true" debug leftover, which made the
		// configuration value dead and the injection branch unreachable.)
		boolean isRestore = getConf().getBoolean("org.work.crawler.isRestore",
				false);

		int res = 0;

		if (!isRestore) {
			res = injectSeeds(args, workdir);
			if (res != 0) {
				return res;
			}
		}

		int numTurn = Integer.parseInt(args[1]);
		// Launch the crawl sequence: each turn selects URLs to fetch,
		// downloads them, parses/extracts links, then updates crawl_db.
		for (int i = 0; i < numTurn; i++) {
			res = runTurn(args, i);
			if (res != 0) {
				return res;
			}
		}

		return res;
	}

	/** Prepare the working directory and inject the initial seeds into crawl_db. */
	private int injectSeeds(String[] args, String workdir) throws Exception {
		FileSystem fs = FileSystem.get(getConf());
		Path workPath = new Path(workdir);
		if (!fs.exists(workPath)) {
			System.out.println("Re-create Crawler folder.");
			fs.mkdirs(workPath);
		}

		// Start from a clean input directory before copying the seed file in.
		// Path(parent, child) is used instead of raw string concatenation so a
		// missing trailing '/' in the configured workdir cannot silently
		// produce a wrong path such as "<dir>in".
		Path inDir = new Path(workPath, "in");
		if (fs.exists(inDir)) {
			fs.delete(inDir, true);
		}
		fs.copyFromLocalFile(
				new Path(getConf().get("org.work.crawler.seeds")),
				new Path(inDir, "init.txt"));

		// Initialization: seed the crawl database.
		System.out.println("inject seeds to crawl_db...");
		int res = ToolRunner.run(getConf(), new InjectDriver(), args);
		if (res == 0) {
			System.out.println("inject success!");
		}
		return res;
	}

	/** Run one crawl level: select, download, parse/extract, update crawl_db. */
	private int runTurn(String[] args, int turn) throws Exception {
		System.out.println("selecting...");
		int res = ToolRunner.run(getConf(), new SelectDriver(), args);
		if (res != 0) {
			return res;
		}

		System.out.println("downloading...");
		res = ToolRunner.run(getConf(), new CrawlDriver(), args);
		if (res != 0) {
			return res;
		}

		System.out.println("parsing&extracting...");
		res = ToolRunner.run(getConf(), new ParserDriver(), args);
		if (res != 0) {
			return res;
		}

		System.out.println("Update Crawl_Db...");
		res = ToolRunner.run(getConf(), new UpdateDb(), args);
		if (res != 0) {
			return res;
		}

		System.out.println("The " + (turn + 1)
				+ "th level crawling finished\n");
		return res;
	}
}
