package crawler;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;

import persist.APIDB;
import persist.RefPageDB;
import code.extract.CEConfig;
import dao.PageDao;
import dao.entity.PageEntity;

/**
 * Static pool of URLs to crawl, handed out one at a time to crawler threads.
 *
 * <p>The pool is built by one of the {@code generateCrawlList} overloads, which
 * shuffle the list (each {@code CrawlItem} gets a random id and the list is
 * sorted by it), then threads pull items via {@link #getNextCrawlItem}.
 */
public class CrawlItemPool {
	// Shuffled list of pending items; replaced wholesale on regeneration.
	private static ArrayList<CrawlItem> URL_to_Crawl = new ArrayList<CrawlItem>();
	// Index of the next item to hand out (cursor into URL_to_Crawl).
	private static int index = 0;

	/**
	 * Builds the crawl list from every not-yet-crawled link in the database
	 * and shuffles it so the crawl order is randomized.
	 *
	 * @param conn open database connection used to query pending links
	 */
	public static synchronized void generateCrawlList(Connection conn) {
		HashSet<String> linkSet = PageDao.getLinkSet(conn, PageEntity.NOT_CRAWLED);
		Random random = new Random();

		ArrayList<CrawlItem> crawlList = new ArrayList<CrawlItem>(linkSet.size());
		for (String url : linkSet) {
			// The random int acts as a shuffle key; CrawlItem's natural
			// ordering is by this id, so sorting yields a random permutation.
			crawlList.add(new CrawlItem(url, random.nextInt()));
		}
		System.out.println("Number of urls to crawl:" + crawlList.size());
		Collections.sort(crawlList);

		URL_to_Crawl = crawlList;
		index = 0; // FIX: reset the cursor so a regenerated list starts from the top
	}

	/**
	 * Builds the crawl list from {@code linkList}, skipping URLs already
	 * recorded as crawled in {@code refDB}, then shuffles the result.
	 *
	 * @param refDB    store of already-crawled URLs
	 * @param linkList candidate URLs to crawl
	 */
	public static synchronized void generateCrawlList(RefPageDB refDB, ArrayList<String> linkList) {
		ArrayList<CrawlItem> crawlList = getURL_to_Crawl(refDB, linkList);
		System.out.println("Number of urls to crawl:" + crawlList.size());
		// Sort by the randomly generated ids, i.e. shuffle the crawl order.
		Collections.sort(crawlList);

		URL_to_Crawl = crawlList;
		index = 0; // FIX: reset the cursor so a regenerated list starts from the top
	}

	/**
	 * Filters {@code fullList} down to URLs not yet crawled according to
	 * {@code refDB}, wrapping each in a {@code CrawlItem} with a random id.
	 *
	 * @param refDB    store of already-crawled URLs
	 * @param fullList all candidate URLs
	 * @return list of items still needing a crawl (unsorted)
	 */
	private static ArrayList<CrawlItem> getURL_to_Crawl(RefPageDB refDB, ArrayList<String> fullList) {
		HashSet<String> crawledURL = refDB.getAllCrawledURLSet();

		System.out.println(fullList.size());

		ArrayList<CrawlItem> crawlList = new ArrayList<CrawlItem>();
		int count = 0;
		Random random = new Random(); // generates the shuffle id for each CrawlItem
		for (String url : fullList) {
			try {
				// Skip URLs that have already been crawled.
				if (!crawledURL.contains(url)) {
					crawlList.add(new CrawlItem(url, random.nextInt()));
					count++;
					// Progress log on the 1st, 101st, 201st, ... addition
					// (same cadence as the original count++ % 100 == 0 check).
					if (count % 100 == 1) {
						System.out.println("Adding the " + count
								+ "th url to crawl");
					}
				}
			} catch (Exception e) {
				// Best-effort: a bad entry should not abort list generation.
				e.printStackTrace();
			}
		}
		return crawlList;
	}

	/**
	 * Gets the next item to crawl and, in the same critical section, marks the
	 * item the calling thread just finished as crawled.
	 *
	 * @param hasCrawled item the caller just crawled, or {@code null} on the
	 *                   caller's first request
	 * @return the next item to crawl, or {@code null} when the pool is exhausted
	 */
	public static synchronized CrawlItem getNextCrawlItem(CrawlItem hasCrawled) {
		if (hasCrawled != null) {
			URL_to_Crawl.get(hasCrawled.index).setCrawled(true);
		}
		// No more items to hand out.
		if (index >= URL_to_Crawl.size()) {
			return null;
		}
		CrawlItem next = URL_to_Crawl.get(index);
		// Remember where this item lives so the caller can report it back.
		next.index = index++;
		return next;
	}
}
