package com.webmining.sherlock.crawler;

import com.webmining.sherlock.admin.Configuration;
import com.webmining.sherlock.admin.ConfigurationException;
import com.webmining.sherlock.admin.Manager;
import com.webmining.sherlock.store.Store;
import org.apache.log4j.Logger;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

import java.io.IOException;
import java.sql.SQLException;

/**
 * Coordinates the execution of one or more Collectors for
 * indexing documents using multithreading.
 */
public class Crawler implements Job {

    protected Logger log;

    /** Collectors run concurrently, one per worker thread. */
    protected Collector collector[];

    /**
     * Crawling scope
     */
    protected Scope scope;

    /**
     * Creates a new sherlock crawler.
     *
     * @throws CrawlingException declared for subclasses; this implementation
     *         does not throw it
     */
    public Crawler() throws CrawlingException {
        log = Logger.getLogger(Crawler.class);
        scope = new Scope();
    }

    /**
     * Quartz entry point: performs a full crawl (cleaning the store first).
     *
     * @param context the Quartz execution context (unused)
     * @throws JobExecutionException if the crawl fails; carries the original
     *         CrawlingException as its cause
     */
    public void execute(JobExecutionContext context)
            throws JobExecutionException {
        try {
            crawl();
        } catch (CrawlingException ce) {
            log.error("Unable to crawl", ce);
            // Preserve the cause so Quartz can report the root failure;
            // do not ask the scheduler to refire immediately.
            throw new JobExecutionException("Unable to crawl", ce, false);
        }
    }

    /**
     * Prepares the crawl, cleaning the store before loading the initial pages.
     *
     * @throws CrawlingException if the store cannot be prepared
     */
    public void prepare() throws CrawlingException {
        prepare(true);
    }

    /**
     * Prepares the crawl by loading the initial pages into the store,
     * optionally cleaning it first.
     *
     * @param cleanBeforeCollect whether to empty the page/server tables before
     *        loading the initial pages
     * @throws CrawlingException if the store cannot be opened, cleaned, or
     *         populated
     */
    public void prepare(boolean cleanBeforeCollect) throws CrawlingException {
        Store store = null;
        try {
            store = new Store();
            store.open();
            if (cleanBeforeCollect) {
                clean(store);
            }
            Manager.loadInitialPages(scope, store);
        } catch (SQLException sqle) {
            log.error("Unable to prepare crawling", sqle);
            throw new CrawlingException("Unable to prepare crawling", sqle);
        } catch (ConfigurationException e) {
            log.error("Unable to prepare crawling", e);
            throw new CrawlingException("Unable to prepare crawling", e);
        } catch (IOException e) {
            log.error("Unable to prepare crawling", e);
            throw new CrawlingException("Unable to prepare crawling", e);
        } finally {
            // The Store constructor may itself have failed; guard against NPE.
            if (store != null) {
                store.close();
            }
        }
    }

    /**
     * Runs a full crawl, cleaning the store first.
     *
     * @throws CrawlingException if preparation or collection fails
     */
    public void crawl() throws CrawlingException {
        crawl(true);
    }

    /**
     * Runs a crawl: prepares the store, then starts the configured number of
     * collector threads and waits for all of them to finish.
     *
     * @param cleanBeforeCollect whether to empty the store before collecting
     * @throws CrawlingException if preparation fails or the collector count
     *         cannot be read from the configuration
     */
    public void crawl(boolean cleanBeforeCollect) throws CrawlingException {
        log.info("Begin crawling");
        prepare(cleanBeforeCollect);
        // Create the number of collectors specified in the configuration for
        // crawling with its own sherlock database connections
        try {
            int collectors = Configuration.getCrawlerCollectors();
            collector = new Collector[collectors];
            for (int i = 0; i < collector.length; i++) {
                collector[i] = new Collector();
                collector[i].setScope(scope);
            }
        } catch (ConfigurationException ce) {
            log.error("Unable to get the number of collectors to use for " +
                    "crawling", ce);
            throw new CrawlingException("Unable to get the number of " +
                    "collectors to use for crawling", ce);
        }
        Thread collectorThread[] = new Thread[collector.length];
        for (int i = 0; i < collector.length; i++) {
            collectorThread[i] = new Thread(collector[i]);
            collectorThread[i].start();
        }
        // Wait for every collector to finish before declaring the crawl done.
        for (int i = 0; i < collector.length; i++) {
            if (collectorThread[i].isAlive()) {
                try {
                    collectorThread[i].join();
                } catch (InterruptedException ie) {
                    log.error("Collector running interrupted", ie);
                    // Restore the interrupt status so callers can observe it.
                    Thread.currentThread().interrupt();
                }
            }
        }
        log.info("End crawling");
    }

    /**
     * Empties the crawl tables so a fresh crawl starts from a clean store.
     *
     * @param store an already-open store to clean
     * @throws CrawlingException if either delete statement fails
     */
    public void clean(Store store) throws CrawlingException {
        try {
            store.executeUpdate("delete from ox_page");
            store.executeUpdate("delete from ox_server");
        } catch (SQLException sqle) {
            log.error("Unable to clean sherlock database", sqle);
            throw new CrawlingException("Unable to clean sherlock database",
                    sqle);
        }
    }

}
