package com.parq.cs.crawl;

import java.util.Vector;

import org.apache.log4j.Logger;
import org.w3c.dom.NodeList;

import com.parq.ie.google.customsearch.CSResult;

public class CSCrawlerController {

	private static Logger log = Logger.getLogger(CSCrawlerController.class);

	/** Maximum time (ms) to wait for a single crawl job before giving up on it. */
	private static final long CRAWL_TIMEOUT_MS = 20000;

	// Crawlers that have reported completion via jobCompleted().
	private Vector<CSCrawler> available;
	// Crawl results accumulated by worker crawlers via add().
	private Vector<CSCrawlResult> results;
	private long searchId;

	/**
	 * Callback invoked by a crawler when its job finishes; marks it as available.
	 *
	 * @param crawler the crawler that completed its job
	 */
	public void jobCompleted(CSCrawler crawler) {
		available.add(crawler);
	}

	/**
	 * Crawls every URL contained in the given search result, one at a time.
	 * Each URL is handed to a {@link CSCrawler} running on its own thread; the
	 * controller waits up to {@link #CRAWL_TIMEOUT_MS} for the crawl to finish
	 * before interrupting it and moving on to the next URL.
	 *
	 * @param result   search result whose node values are the URLs to crawl
	 * @param searchId identifier propagated to every spawned crawler
	 * @throws Exception if crawler construction fails or the wait is interrupted
	 */
	public CSCrawlerController(CSResult result, long searchId) throws Exception {
		this.searchId = searchId;
		this.results = new Vector<CSCrawlResult>();
		this.available = new Vector<CSCrawler>();

		NodeList list = result.getResults();

		for (int i = 0; i < list.getLength(); i++) {
			String url = list.item(i).getNodeValue();
			log.info("\t " + i + ": " + url);

			CSCrawler c = new CSCrawler("CrawlJob" + i, this);
			c.setSearchId(searchId);
			c.setUrl(url);

			Thread th = new Thread(c);
			th.start();
			// Block until the worker finishes or the timeout elapses. This
			// replaces the original busy-wait loop (which spun a CPU core at
			// 100%) and the deprecated, unsafe Thread.stop() (removed in
			// Java 20+) with join(timeout) + cooperative interruption.
			th.join(CRAWL_TIMEOUT_MS);
			if (th.isAlive()) {
				log.warn("Crawl job " + i + " timed out after " + CRAWL_TIMEOUT_MS
						+ " ms: " + url);
				th.interrupt();
			}
		}
	}

	/**
	 * Appends a crawl result; called by worker crawlers as they finish.
	 *
	 * @param e the result to record
	 * @return {@code true} (as specified by {@link Vector#add})
	 */
	public boolean add(CSCrawlResult e) {
		return results.add(e);
	}

	public Vector<CSCrawlResult> getResults() {
		return results;
	}

	public void setResults(Vector<CSCrawlResult> results) {
		this.results = results;
	}

}
