/**
 * @author Fang Lu, fanglupku@gmail.com
 * 2011-3-20
 */
package code.extract.training;

import java.io.File;
import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import code.extract.CEConfig;

import net.GooglePageParserRegexp;
import net.GoogleResultItem;
import net.PageFetcher;


/**
 * Entry point that crawls the pages listed in the Google link-list file and
 * stores them as training data. Crawl parameters (input file names, output
 * directory, thread count) all come from {@link CEConfig}; the actual fetching
 * is delegated to {@code threadNum} {@link PageCrawlThread} workers that share
 * a single {@link PageLinkList} work queue.
 */
public class TrainingDataCrawler {
	public static void main(String[] args) {
		// All configuration is centralized in CEConfig.
		String linklistFilename = CEConfig.getGoogleLinklistFilename();
		String crawledKeywordFilename = CEConfig.getTrainingKeywordCrawledListFilename();
		String crawledLinklistFilename = CEConfig.getTrainingLinkCrawledListFilename();
		int threadNum = CEConfig.getTrainingCrawlThreadNum();
		String savePath = CEConfig.getTrainingOriginalSavepath();

		// Shared work list: the crawled-keyword/link files let a restarted run
		// skip work that was already completed (per PageLinkList's contract —
		// NOTE(review): confirm against PageLinkList, not visible here).
		PageLinkList pageLinklist = new PageLinkList(linklistFilename, crawledKeywordFilename, crawledLinklistFilename, null);
		PageCrawlThread.initPageLinkList(pageLinklist);
		PageCrawlThread.initThreadNum(threadNum);

		// Exactly threadNum long-running workers are submitted, so a fixed-size
		// pool expresses the intent better than a cached (unbounded) pool and
		// behaves identically here.
		ExecutorService executor = Executors.newFixedThreadPool(threadNum);
		for (int i = 0; i < threadNum; ++i) {
			// Each worker gets its own output path root and a distinct id.
			executor.execute(new PageCrawlThread(savePath, i));
		}
		// Stop accepting new tasks; already-submitted workers run to completion
		// and the JVM exits once the (non-daemon) pool threads finish.
		executor.shutdown();
	}
}
