package yatan.crawler.single;

import java.util.List;
import java.util.Properties;

import org.apache.log4j.Logger;

import yatan.cluster.concurrent.JobDoneHandler;
import yatan.cluster.concurrent.JobPool;
import yatan.cluster.job.Job;
import yatan.cluster.job.JobDispatcher;
import yatan.cluster.job.JobResult;
import yatan.cluster.job.JobRunnerFactory;
import yatan.common.utility.ParamUtility;

/**
 * A standalone crawler node. A background scheduling thread keeps the local {@link JobPool}
 * topped up with jobs pulled from a {@link JobDispatcher}; completed or failed jobs are
 * routed back to the dispatcher and immediately wake the scheduler so the pool refills
 * without waiting for the full schedule delay.
 */
public class Crawler {
    private static final String DEFAULT_CONFIG_FILE = "singleCrawler.properties";

    /** Refill factor: request more jobs while running jobs &lt; poolSize * this factor. */
    private static final double REFILL_THRESHOLD_FACTOR = 1.3;

    // static final: the logger carries no per-instance state.
    private static final Logger logger = Logger.getLogger(Crawler.class);

    // Dedicated monitor for parking/waking the scheduling loop. Waiting and notifying on
    // the Thread object itself (as before) can interfere with Thread.join(), which uses
    // the Thread instance as its own wait monitor.
    private final Object scheduleLock = new Object();

    private Thread workingThread;

    private JobDispatcher jobDispatcher;
    private JobPool jobPool;

    private int threadPoolSize;
    private int requestOnceJobCount;
    private int waitJobScheduleDelay;

    /** Creates a crawler configured from the default properties file. */
    public Crawler() {
        this(DEFAULT_CONFIG_FILE);
    }

    /**
     * Creates a crawler configured from the given properties file.
     *
     * @param configFile path of the configuration file; must be non-null and non-empty
     */
    public Crawler(String configFile) {
        ParamUtility.checkStringNullEmpty(configFile, "configFile");

        // load configuration file
        Properties properties = ConfigUtility.loadConfigFile(configFile);

        // config the node with the loaded property file
        config(properties);
    }

    /**
     * Creates a crawler configured from pre-loaded properties.
     *
     * @param properties configuration properties; must not be null
     */
    public Crawler(Properties properties) {
        ParamUtility.checkNull(properties, "properties");

        // config the node with given properties
        config(properties);
    }

    /**
     * Starts the background scheduling thread. May be called at most once.
     *
     * @throws IllegalStateException if the crawler has already been started
     */
    public synchronized void start() {
        if (this.workingThread != null) {
            // Starting twice would run two competing dispatch loops against one pool.
            throw new IllegalStateException("Crawler has already been started.");
        }
        this.workingThread = new WorkingThread();
        this.workingThread.start();
    }

    /**
     * Reads the crawler settings from {@code properties} and wires up the dispatcher
     * and the job pool. Defaults: pool size 100, batch size 100, schedule delay 60s.
     */
    private void config(Properties properties) {
        this.jobDispatcher = ConfigUtility.createObject(properties, "job_dispatcher", JobDispatcher.class);

        // read config properties
        this.threadPoolSize = ConfigUtility.readIntProperty(properties, "thread_pool_size", false, 100);
        this.requestOnceJobCount = ConfigUtility.readIntProperty(properties, "request_once_job_count", false, 100);
        this.waitJobScheduleDelay = ConfigUtility.readIntProperty(properties, "wait_job_schedule_delay", false, 60000);

        // create job runner factory
        JobRunnerFactory jobRunnerFactory = ConfigUtility.createObject(properties, "job_runner_factory",
                JobRunnerFactory.class);

        // initialize job pool
        this.jobPool = new JobPool(this.threadPoolSize);
        this.jobPool.setJobRunnerFactory(jobRunnerFactory);
        this.jobPool.setJobDoneHandler(new SingleJobDoneHandler());
    }

    /**
     * Scheduling loop: requests a new batch of jobs whenever the running-job count drops
     * below the refill threshold, then parks until a job finishes or the configured delay
     * elapses. Exits cleanly when the thread is interrupted.
     */
    private class WorkingThread extends Thread {
        @Override
        public void run() {
            // Loop-invariant threshold; hoisted out of the loop.
            double refillThreshold = threadPoolSize * REFILL_THRESHOLD_FACTOR;

            while (!isInterrupted()) {
                if (jobPool.getJobCount() < refillThreshold) {
                    logger.info("Running jobs " + jobPool.getJobCount() + " < " + refillThreshold
                            + " on the node, requesting new jobs...");

                    // NOTE: "dispathJobs" is the (misspelled) JobDispatcher API; keep as-is.
                    List<Job> retrievedJobs = jobDispatcher.dispathJobs(requestOnceJobCount);

                    // Defensive: a dispatcher may legitimately have nothing to hand out.
                    if (retrievedJobs != null) {
                        logger.debug("Retrieved " + retrievedJobs.size() + " jobs. Submitting to thread pool...");
                        for (Job job : retrievedJobs) {
                            jobPool.submit(job);
                        }
                        logger.info("Received " + retrievedJobs.size() + " jobs.");
                    }
                }

                logger.debug("Wait until another job is done(" + (waitJobScheduleDelay / 1000) + " seconds most)...");
                synchronized (scheduleLock) {
                    try {
                        scheduleLock.wait(waitJobScheduleDelay);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag and stop the loop instead of spinning;
                        // this gives the crawler a clean shutdown path.
                        Thread.currentThread().interrupt();
                        logger.warn("Scheduling thread interrupted, exiting: " + e.getMessage(), e);
                        return;
                    }
                }
            }
        }
    }

    /**
     * Routes completed/failed jobs back to the dispatcher and wakes the scheduling loop
     * so it can refill the pool immediately rather than waiting out the schedule delay.
     */
    private class SingleJobDoneHandler implements JobDoneHandler {
        @Override
        public void success(JobResult jobResult) {
            logger.debug("Job " + jobResult.getId() + " is done. Processing job result...");
            jobDispatcher.handleJobResult(jobResult);
            logger.debug("Result of Job " + jobResult.getId() + " is handled");

            wakeScheduler();
        }

        @Override
        public void error(Job job, Exception error) {
            logger.debug("An error occurred in the job " + job + ". Processing job error...");
            jobDispatcher.handleJobError(job, error);
            logger.debug("Error of Job " + job.getId() + " is handled.");

            wakeScheduler();
        }

        // Nudge the scheduling loop out of its timed wait (single waiter, notify suffices).
        private void wakeScheduler() {
            synchronized (scheduleLock) {
                scheduleLock.notify();
            }
        }
    }
}
