package cn.dawn.webcollector.crawler;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.dawn.webcollector.fetcher.DbUpdater;
import cn.dawn.webcollector.fetcher.Fetcher;
import cn.dawn.webcollector.fetcher.VisitorFactory;
import cn.dawn.webcollector.generator.Injector;
import cn.dawn.webcollector.generator.StandardGenerator;
import cn.dawn.webcollector.net.HttpRequester;
import cn.dawn.webcollector.net.HttpRequesterImpl;
import cn.dawn.webcollector.util.Config;
import cn.dawn.webcollector.util.ConfigParser;
import cn.dawn.webcollector.util.FileUtils;
import cn.dawn.webcollector.util.PropertiesConfig;

import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;

/**
 * Base class for breadth-first crawlers backed by a Berkeley DB (JE) task store.
 * <p>
 * Subclasses supply the page-visiting logic via {@link VisitorFactory}. A crawl is
 * driven by {@link #start(int)}, which runs up to {@code depth} generate/fetch rounds
 * over the URL frontier stored under {@link #crawlPath}.
 * <p>
 * Not thread-safe; one instance drives one crawl at a time ({@link #stop()} may be
 * called from another thread to request termination).
 */
public abstract class Crawler implements VisitorFactory {

	public static final Logger LOG = LoggerFactory.getLogger(Crawler.class);

	// Crawl state: RUNNING while the depth loop executes, STOPED after stop().
	protected int status;
	public final static int RUNNING = 1;
	public final static int STOPED = 2;
	// When true, an existing crawl directory is reused instead of being wiped.
	protected boolean resumable = false;
	protected int threads = 50;
	// Per-depth cap on generated URLs; null means unlimited.
	protected Integer topN = null;
	// Seed URLs, injected only when a crawl starts from scratch.
	protected ArrayList<String> seeds = new ArrayList<String>();
	// Seed URLs injected on every start, overwriting matching history entries.
	protected ArrayList<String> forcedSeeds = new ArrayList<String>();
	protected Fetcher fetcher;
	int retry = Config.retry;
	protected int maxRetry = Config.MAX_RETRY;

	protected VisitorFactory visitorFactory = this;
	protected HttpRequester httpRequester = new HttpRequesterImpl();
	// Local directory holding the Berkeley DB environment for this crawl task.
	String crawlPath;

	Environment env;

	/**
	 * Creates a crawler whose task state lives under the given directory.
	 *
	 * @param crawlPath local directory for the Berkeley DB crawl store
	 */
	public Crawler(String crawlPath) {
		this.crawlPath = crawlPath;
		loadProperties();
	}

	/**
	 * Loads {@code system.properties} from the classpath into
	 * {@link PropertiesConfig}. Failures are logged but do not abort
	 * construction (the crawler falls back to built-in defaults).
	 */
	private void loadProperties() {
		try {
			LOG.info("加载配置信息.");
			ConfigParser.parseFromClassPath("system.properties", PropertiesConfig.class);
		} catch (Exception e) {
			// Log at error level with the full stack trace instead of
			// printStackTrace() + a debug-level message that would normally be invisible.
			LOG.error("加载配置信息失败!.", e);
		}
	}

	/**
	 * Injects the regular seeds into the crawl database.
	 *
	 * @throws Exception if the Berkeley DB injection fails
	 */
	public void inject() throws Exception {
		Injector injector = new Injector(env);
		injector.inject(seeds);
	}

	/**
	 * Injects the forced seeds into the crawl database, overwriting any
	 * matching entries from previous runs.
	 *
	 * @throws Exception if the Berkeley DB injection fails
	 */
	public void injectForcedSeeds() throws Exception {
		Injector injector = new Injector(env);
		injector.inject(forcedSeeds);
	}

	/**
	 * Runs the crawl for at most {@code depth} rounds. Each round generates the
	 * current frontier from the database and fetches it with {@link #threads}
	 * worker threads. The loop ends early when {@link #stop()} is called or a
	 * round generates no URLs.
	 *
	 * @param depth maximum number of breadth-first rounds
	 * @throws Exception on database, I/O, or fetch errors
	 */
	public void start(int depth) throws Exception {
		File dir = new File(crawlPath);
		boolean needInject = true;

		if (resumable && dir.exists()) {
			// Resuming an existing crawl: the stored frontier already holds the seeds.
			needInject = false;
		}
		if (resumable && !dir.exists() && !dir.mkdirs()) {
			// Fail fast with a clear message rather than a confusing JE error later.
			throw new IOException("failed to create crawl directory: " + crawlPath);
		}
		if (!resumable) {
			// Fresh crawl: wipe any previous state under crawlPath.
			if (dir.exists()) {
				FileUtils.deleteDir(dir);
			}
			if (!dir.mkdirs()) {
				throw new IOException("failed to create crawl directory: " + crawlPath);
			}
			if (seeds.isEmpty() && forcedSeeds.isEmpty()) {
				LOG.error("Please add at least one seed");
				return;
			}
		}
		// Open (creating if necessary) the Berkeley DB environment backing the crawl.
		EnvironmentConfig environmentConfig = new EnvironmentConfig();
		environmentConfig.setAllowCreate(true);
		env = new Environment(dir, environmentConfig);
		try {
			if (needInject) {
				inject();
			}

			if (!forcedSeeds.isEmpty()) {
				injectForcedSeeds();
			}

			status = RUNNING;
			for (int i = 0; i < depth; i++) {
				if (status == STOPED) {
					break;
				}
				LOG.info("starting depth {}", i + 1);
				long startTime = System.currentTimeMillis();

				StandardGenerator generator = new StandardGenerator(env);
				generator.setMaxRetry(maxRetry);
				generator.setTopN(topN);
				fetcher = new Fetcher();
				fetcher.setHttpRequester(httpRequester);
				fetcher.setDbUpdater(new DbUpdater(env));
				fetcher.setVisitorFactory(visitorFactory);
				fetcher.setRetry(retry);
				fetcher.setThreads(threads);
				fetcher.fetchAll(generator);
				long endTime = System.currentTimeMillis();
				long costTime = (endTime - startTime) / 1000;
				int totalGenerate = generator.getTotalGenerate();

				LOG.info("depth {} finish: \n\tTOTAL urls:\t{}\n\tTOTAL time:\t{} seconds", i + 1, totalGenerate, costTime);
				if (totalGenerate == 0) {
					// Frontier exhausted: no URLs were generated at this depth.
					break;
				}
			}
		} finally {
			// Always release the JE environment (it holds file locks), even when
			// injection or a fetch round throws.
			env.close();
		}
	}

	/**
	 * Requests the crawl to stop after the current round. Safe to call even if
	 * {@link #start(int)} has not run yet.
	 */
	public void stop() {
		status = STOPED;
		if (fetcher != null) {
			// Guard against NPE when stop() is invoked before the first round
			// has created a fetcher.
			fetcher.stop();
		}
	}

	public VisitorFactory getVisitorFactory() {
		return visitorFactory;
	}

	public void setVisitorFactory(VisitorFactory visitorFactory) {
		this.visitorFactory = visitorFactory;
	}

	public HttpRequester getHttpRequester() {
		return httpRequester;
	}

	public void setHttpRequester(HttpRequester httpRequester) {
		this.httpRequester = httpRequester;
	}

	/**
	 * Adds a seed URL. In resumable mode, regular seeds are injected only the
	 * first time the crawl runs.
	 *
	 * @param seed seed URL
	 */
	public void addSeed(String seed) {
		seeds.add(seed);
	}

	/**
	 * Adds a forced seed URL. Forced seeds are injected on every start and
	 * overwrite any matching URL in the crawl history.
	 *
	 * @param seed seed URL
	 */
	public void addForcedSeed(String seed) {
		forcedSeeds.add(seed);
	}

	public ArrayList<String> getSeeds() {
		return seeds;
	}

	public void setSeeds(ArrayList<String> seeds) {
		this.seeds = seeds;
	}

	public ArrayList<String> getForcedSeeds() {
		return forcedSeeds;
	}

	public void setForcedSeeds(ArrayList<String> forcedSeeds) {
		this.forcedSeeds = forcedSeeds;
	}

	public boolean isResumable() {
		return resumable;
	}

	public void setResumable(boolean resumable) {
		this.resumable = resumable;
	}

	public int getThreads() {
		return threads;
	}

	public void setThreads(int threads) {
		this.threads = threads;
	}

	public Integer getTopN() {
		return topN;
	}

	public void setTopN(Integer topN) {
		this.topN = topN;
	}

	public int getRetry() {
		return retry;
	}

	public void setRetry(int retry) {
		this.retry = retry;
	}

	public int getMaxRetry() {
		return maxRetry;
	}

	public void setMaxRetry(int maxRetry) {
		this.maxRetry = maxRetry;
	}

}
