/** 
 * File-Name:CrawlerManager.java
 *
 * Created on 2010-11-20 06:31:13 PM
 * 
 * @author: Neo (neolimeng@gmail.com)
 * Software Engineering Institute, Peking University, China
 * 
 * Copyright (c) 2009, Peking University
 * 
 *
 */
package org.neocrawler.crawler;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.log4j.Logger;
import org.neocrawler.dao.WebContentDao;
import org.neocrawler.entity.WebContent;
import org.neocrawler.parser.IParser;
import org.neocrawler.util.Configuration;
import org.neocrawler.util.LinkDatabase;

/**
 * Description: manager of a crawler. Seeds the shared {@code LinkDatabase}
 * and launches a fixed-size pool of {@link CrawlerThread} workers.
 *
 * <p>NOTE(review): configuration is held in a mutable static field, so only
 * one crawl per JVM is supported; concurrent start/restart calls would race.
 *
 * @author: Neo (neolimeng@gmail.com) Software Engineering Institute, Peking
 *          University, China
 * @version 1.0 2010-11-20 06:31:13 PM
 */
public class CrawlerManager {
	private static Logger log = Logger.getLogger(CrawlerManager.class);
	private static Configuration configuration;

	/**
	 * Starts a fresh crawl: instantiates the configured parser by reflection,
	 * asks it for the initial seeds and delegates to
	 * {@link #start(Configuration, List)}.
	 *
	 * @param configuration crawler settings (parser class name, pool size, ...)
	 */
	public static void start(Configuration configuration) {
		try {
			// Instantiate the parser class named in the configuration.
			IParser parser = (IParser) Class
					.forName(configuration.getParserClassName()).newInstance();

			// Read the seeds info.
			List<WebContent> seeds = parser.getInitialSeeds();
			start(configuration, seeds);
		} catch (Exception e) {
			// Log with the cause attached so the stack trace goes to the
			// configured appenders instead of raw stderr.
			log.error("Error occurs when get the seeds!!", e);
		}
	}

	/**
	 * Starts a fresh crawl from an explicit seed list.
	 *
	 * @param configuration crawler settings
	 * @param seeds         initial contents enqueued as unvisited
	 */
	public static void start(Configuration configuration, List<WebContent> seeds) {
		CrawlerManager.configuration = configuration;
		log.info("===== NeoCrawler Start! With "
				+ configuration.getThreadPoolSize()
				+ " Crawling Threads! =====");
		try {
			log.info("The number of initial seeds is " + seeds.size());
			LinkDatabase.addUnvisitedWebContents(seeds);
			startCrawlerThreads();
		} catch (Exception e) {
			log.error("Error occurs when start the crawler manager!!", e);
		}
	}

	/**
	 * Resumes an earlier crawl, reloading both visited and unvisited
	 * contents from the database before starting the worker threads.
	 *
	 * @param configuration crawler settings
	 */
	public static void restart(Configuration configuration) {
		CrawlerManager.configuration = configuration;
		log.info("===== NeoCrawler ReStart! With "
				+ configuration.getThreadPoolSize()
				+ " Crawling Threads! =====");
		WebContentDao webContentDao = new WebContentDao();
		try {
			log.info("===== Add visited webcontents!");
			// Note: getVisitedWebCotents() keeps the DAO's existing (misspelled)
			// method name — renaming it is outside this class.
			LinkDatabase.addVisitedWebContents(webContentDao
					.getVisitedWebCotents());

			log.info("===== Add unvisited webcontents!!");
			List<WebContent> webContents = webContentDao
					.getUnvisitedWebContents();
			if (webContents == null) {
				// Nothing left to crawl; do not start any worker threads.
				log.info("addUnvisitedWebContents returns null!! There no URL to crawl!");
				return;
			}
			LinkDatabase
					.addUnvisitedWebContentsWithoutSaveOrUpdate(webContents);
			startCrawlerThreads();
		} catch (Exception e) {
			log.error("Error occurs when start the crawler manager!!", e);
		}
	}

	/**
	 * Resumes an earlier crawl from caller-supplied visited/unvisited lists
	 * instead of reloading them from the database.
	 *
	 * @param configuration        crawler settings
	 * @param visitedWebContents   contents already crawled
	 * @param unvisitedWebContents contents still to crawl; if {@code null},
	 *                             no worker threads are started
	 */
	public static void restart(Configuration configuration,
			List<WebContent> visitedWebContents,
			List<WebContent> unvisitedWebContents) {
		CrawlerManager.configuration = configuration;
		log.info("===== NeoCrawler ReStart! With "
				+ configuration.getThreadPoolSize()
				+ " Crawling Threads! =====");
		try {
			log.info("===== Add visited webcontents!");
			LinkDatabase.addVisitedWebContents(visitedWebContents);

			log.info("===== Add unvisited webcontents!!");
			if (unvisitedWebContents == null) {
				log.info("addUnvisitedWebContents returns null!! There no URL to crawl!");
				return;
			}
			LinkDatabase
					.addUnvisitedWebContentsWithoutSaveOrUpdate(unvisitedWebContents);
			startCrawlerThreads();
		} catch (Exception e) {
			log.error("Error occurs when start the crawler manager!!", e);
		}
	}

	/**
	 * Launches the configured number of {@link CrawlerThread}s on a fixed
	 * thread pool. Shared by all start/restart variants so the startup logic
	 * lives in one place.
	 */
	private static void startCrawlerThreads() {
		int poolSize = configuration.getThreadPoolSize();
		ExecutorService executorService = Executors.newFixedThreadPool(poolSize);
		for (int i = 0; i < poolSize; i++) {
			log.info("Start Thread #" + i + " of total " + poolSize + " Threads.");
			executorService.execute(new CrawlerThread());
		}
		// Stop accepting new tasks; already-submitted crawler threads keep
		// running, and the pool's threads can terminate once they finish
		// instead of keeping the JVM alive forever.
		executorService.shutdown();
	}

	public static void main(String[] args) {
		// NOTE(review): database credentials are hardcoded here — move them to
		// external configuration / environment before shipping.
		Configuration configuration = new Configuration(
				"jdbc:mysql://192.168.4.181:3307/neocrawlerdefault", "root",
				"woxnsk", Configuration.DEFAULT_STORE_PATH);
		CrawlerManager.start(configuration);
	}

	/** @return the configuration most recently passed to start/restart */
	public static Configuration getConfiguration() {
		return configuration;
	}

	/** @param configuration replaces the shared static configuration */
	public static void setConfiguration(Configuration configuration) {
		CrawlerManager.configuration = configuration;
	}

}
