/** 
 * File-Name: CrawlerThread.java
 *
 * Created on 2010-11-20 下午06:22:24
 * 
 * @author: Neo (neolimeng@gmail.com)
 * Software Engineering Institute, Peking University, China
 * 
 * Copyright (c) 2009, Peking University
 * 
 *
 */
package org.neocrawler.crawler;

import java.util.Random;

import org.apache.log4j.Logger;
import org.neocrawler.entity.WebContent;
import org.neocrawler.network.HttpClientAgent;
import org.neocrawler.parser.IParser;
import org.neocrawler.util.LinkDatabase;

/**
 * Description: Worker thread that repeatedly dequeues unvisited links,
 * downloads the corresponding pages, and enqueues newly discovered links.
 * 
 * @author: Neo (neolimeng@gmail.com) Software Engineering Institute, Peking
 *          University, China
 * @version 1.0 2010-11-20 下午06:22:24
 */
public class CrawlerThread implements Runnable {
	private static final Logger log = Logger.getLogger(CrawlerThread.class);
	// Base politeness delay between fetches, and the exclusive upper bound of
	// the random jitter added to it; both read once from the configuration.
	private static final long DEFAULT_SLEEP_TIME = CrawlerManager
			.getConfiguration().getSleepTime();
	private static final long DEFAULT_SLEEP_TIME_RANDOM = CrawlerManager
			.getConfiguration().getSleepTimeRandom();

	/**
	 * Crawl loop: while the shared unvisited-link queue is non-empty, sleep a
	 * randomized politeness interval, dequeue one {@link WebContent}, download
	 * its page via {@link HttpClientAgent}, mark it visited, and enqueue the
	 * links extracted by the configured {@link IParser}.
	 */
	public void crawl() {
		// Create the RNG once; the original allocated a new Random per
		// iteration, which is wasteful and can correlate seeds.
		Random random = new Random();
		// Loop while there are still unvisited links to fetch.
		while (!LinkDatabase.unVisitedWebContentsEmpty()) {
			try {
				// Jitter uniformly in [0, DEFAULT_SLEEP_TIME_RANDOM).
				// The original clamped negative nextLong() results to 0,
				// which put about half the probability mass on zero jitter;
				// folding negatives back into range removes that bias.
				// Also guard against a zero/negative bound, where the
				// original would throw ArithmeticException (% 0).
				long randomLong = 0L;
				if (DEFAULT_SLEEP_TIME_RANDOM > 0) {
					randomLong = random.nextLong() % DEFAULT_SLEEP_TIME_RANDOM;
					if (randomLong < 0) {
						randomLong += DEFAULT_SLEEP_TIME_RANDOM;
					}
				}
				log.info(Thread.currentThread().getName() + "\t Sleep "
						+ (DEFAULT_SLEEP_TIME + randomLong) + "~~ZzZzZzZz~~~~"
						+ "未抓取的WebContent个数为"
						+ LinkDatabase.getUnvisitedWebContentNum());
				Thread.sleep(DEFAULT_SLEEP_TIME + randomLong);

				// ============== Parser class is configurable ============== //
				IParser parser;
				try {
					parser = (IParser) Class.forName(
							CrawlerManager.getConfiguration()
									.getParserClassName()).newInstance();
				} catch (Exception e) {
					// Without a parser we cannot extract links. The original
					// swallowed this and later hit an NPE at parser.parse()
					// after the page had already been fetched; skip instead.
					log.error("Failed to instantiate configured parser", e);
					continue;
				}
				// ============== Parser class is configurable ============== //

				// Dequeue the next URL to visit from the head of the queue.
				WebContent visitWebContent = LinkDatabase
						.unVisitedWebContentDeQueue();
				if (visitWebContent == null) {
					log.error("错误！！visitWebContent为空，停止！");
					break;
				}

				// Download the page and persist it under the configured path.
				String webPage = HttpClientAgent.getMethod(visitWebContent,
						CrawlerManager.getConfiguration().getStorePath());

				// Record this URL as visited before extracting its out-links.
				LinkDatabase.addVisitedWebContent(visitWebContent);

				// Extract URLs from the downloaded page and enqueue them.
				LinkDatabase.addUnvisitedWebContents(parser.parse(
						visitWebContent, webPage));

			} catch (InterruptedException e) {
				// Restore the interrupt flag and stop this worker cleanly;
				// the original swallowed the interrupt and kept looping.
				Thread.currentThread().interrupt();
				break;
			} catch (Exception e) {
				// Log via the class logger (not printStackTrace) and keep
				// crawling the remaining queue entries.
				log.error("Crawl iteration failed", e);
			}
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Logs thread start/end around a single invocation of {@link #crawl()},
	 * catching any unexpected exception so the thread terminates with a log
	 * record instead of an unhandled stack trace.
	 */
	@Override
	public void run() {
		log.info("名为" + Thread.currentThread().getName() + "的Crawler线程启动：：：");
		try {
			crawl();
		} catch (Exception e) {
			log.error("Crawler thread terminated abnormally", e);
		}
		log.info("名为" + Thread.currentThread().getName() + "的Crawler线程结束！！！");
	}
}
