package crawler;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import code.extract.CEConfig;
import dao.BaseDao;
import dao.PageDao;
import dao.entity.PageEntity;
import persist.APIDB;
import persist.RefPageDB;

/**
 * Driver for the back-link crawler: loads the full link list (either from
 * MySQL or from a Berkeley DB store), seeds the crawl item pool, then runs a
 * fixed-size pool of {@code BackLinkCrawlerThread} workers, each with its own
 * database connection.
 */
public class BackLinkCrawler {
	/** Link-list source: Berkeley DB (APIDB). */
	public static final int GET_LINKLIST_FROM_BDB = 0;
	/** Link-list source: MySQL via PageDao. */
	public static final int GET_LINKLIST_FROM_MYSQL = 1;

	/**
	 * Entry point. Builds the crawl list and starts the worker threads.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		final int threads = CEConfig.getBacklinkCrawlerThread();
		RefPageDB refDB = new RefPageDB(CEConfig.getRefPageBDBDirname(),
				1024 * 1024, "utf8");
		refDB.open(CEConfig.getRefPageBDBDatabasename());

		// Source selection is currently hard-coded to MySQL; the BDB branch
		// is kept as an alternative data source.
		int linklistModel = GET_LINKLIST_FROM_MYSQL;
		List<String> fullList;
		if (linklistModel == GET_LINKLIST_FROM_MYSQL) {
			Connection conn = BaseDao.getTestConn();
			fullList = PageDao.getLinkList(conn, PageEntity.ALL);
		} else {
			APIDB apidb = new APIDB(CEConfig.getAPIBDBDirname(), 1024 * 1024,
					"utf8");
			fullList = apidb.getAllLinkList();
			apidb.close();
		}
		CrawlItemPool.generateCrawlList(refDB, fullList);

		ExecutorService exec = Executors.newFixedThreadPool(threads);
		// Assign one crawl job to each worker; each worker gets its own
		// JDBC connection since Connection objects are not thread-safe.
		for (int j = 0; j < threads; j++) {
			BackLinkCrawlerThread crawlerThread = new BackLinkCrawlerThread();
			Connection conn = BaseDao.getTestConn();
			crawlerThread.setJob(j, refDB, conn);
			exec.execute(crawlerThread);
		}
		// BUG FIX: the pool was never shut down, so its non-daemon threads
		// kept the JVM alive forever after all crawl jobs finished.
		// shutdown() lets already-submitted tasks run to completion while
		// allowing the process to exit afterwards.
		exec.shutdown();
	}
}
