package com.wsc.crawler.init;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import org.apache.http.HttpHost;
import org.apache.http.client.methods.HttpGet;
import org.apache.log4j.Logger;
import com.wsc.crawler.grabber.LocalClient;
import com.wsc.crawler.grabber.URLBeanQueue;
import com.wsc.crawler.utils.CheckHost;
import com.wsc.crawler.utils.InternetConnection;
import com.wsc.exceptions.UnknownConfigOption;

//Import Enum
import com.wsc.crawler.Constants.available;
import com.wsc.crawler.WSCrawler;



public final class Initializer {

	private static final Logger log = Logger.getLogger(Initializer.class.getName());

	// crawler.lock in the configured tmp dir; its presence marks a running
	// (or forcibly killed) crawler instance.
	private File lockfile;

	// True when no stale lock file was found, i.e. the previous run stopped cleanly.
	private boolean iscleanStopped = false;

	// Recognized values for the seed-URL-source option in crawler-core.xml.
	private enum SEED_URL_SOURCE {
		frontier, local_XML_file
	};

	private CrawlerConfig config = null;
	private URLBeanQueue queue;

	/**
	 * Creates an Initializer with a freshly built default configuration.
	 * Delegates to {@link #Initializer(CrawlerConfig)} instead of duplicating
	 * the connectivity / lock-file bootstrap logic.
	 */
	public Initializer() {
		// If no Config object is supplied then create a default configuration object.
		this(new CrawlerConfig());
	}

	/**
	 * Creates an Initializer from an externally supplied configuration.
	 * Checks internet connectivity; when online, determines whether the
	 * previous instance stopped cleanly and, if so, creates the lock file.
	 * When offline, stops this crawler instance via {@link WSCrawler#stopInstance()}.
	 *
	 * @param config external crawler configuration
	 */
	public Initializer(CrawlerConfig config) {
		this.setConfig(config);
		log.debug("Intializing crawler resources...");

		if (checkInternetConnection()) {
			// check whether the previous instance is cleanly stopped or not.
			this.iscleanStopped = this.isCleanStopped();

			// Create the lock file only when the previous crawler stopped cleanly.
			if (this.iscleanStopped) {
				this.createLockFile();
			}
		} else {
			// If no internet connection is available then stop the current
			// instance of the crawler.
			WSCrawler.stopInstance();
		}
	}

	/**
	 * Probes internet connectivity via {@link InternetConnection}.
	 *
	 * @return false only when connectivity is reported as "no"; a partial or
	 *         slow connection still counts as available
	 */
	private boolean checkInternetConnection() {
		log.debug("Checking whether InternetConnection is available or not.");

		String response = InternetConnection.isInternetAvialable();

		if (available.no.name().equals(response)) {
			log.debug("Your System has No Internet connection.");
			log.debug("Exiting Crawler.");
			return false;
		} else if (available.yes.name().equals(response)) {
			log.debug("Internet Connection is available.");
			log.debug("Continuing process.");
			return true;
		} else {
			log.debug("Internet Connection is available but partial/slow.");
			log.debug("Continuing process.");
			return true;
		}
	}

	/**
	 * Creates the crawler.lock file in the configured tmp directory, but only
	 * when the previous instance stopped cleanly; otherwise just warns.
	 */
	private void createLockFile() {
		lockfile = new File(config.getTmp_dir() + "/crawler.lock");

		if (this.iscleanStopped) {
			// Previous run ended cleanly: claim the lock for this session.
			log.debug("Creating lock file.");
			try {
				if (lockfile.createNewFile()) {
					log.info("Lockfile " + lockfile.getName()
							+ " created successfully.");
				}
			} catch (IOException e) {
				log.warn("Exception thrown while creating lock file", e);
			}
		} else {
			log.warn("Its looks like the previous instace of Wscrawler is stopped forcebly.");
		}
	}

	/**
	 * Initializes the frontier queue. When the previous instance stopped
	 * cleanly, seed URLs come from the source configured in crawler-core.xml;
	 * otherwise they are recovered from the XML file left in the temp
	 * directory.
	 *
	 * @return the populated URL queue, or null when no source yielded URLs
	 */
	public URLBeanQueue intializeFrotier() {
		if (this.iscleanStopped) {
			log.info("previous instance of crawler is cleanly stopped");
			log.info("Getting URLS from Source, which is described in crawler-core.xml file");

			// Get Frontier urls from source specified in crawler-core.xml
			this.queue = getSeedUrlsFromSourceInConfig();
			return queue;
		} else {
			log.info("previous instance of crawler is forcebly stopped");
			log.info("Getting URLS from " + config.getXML_Filename()
					+ " from temp directory.");

			// Get Frontier urls from xml file stored in temp directory.
			this.queue = getURLsFromXMLFile();
			return queue;
		}
	}

	/**
	 * Resolves the seed-URL source named in the configuration and fetches the
	 * seed URLs from it. An unrecognized option falls back to the default
	 * source (the Frontier server).
	 *
	 * @return the seed URL queue, or null when the chosen source returned none
	 */
	private URLBeanQueue getSeedUrlsFromSourceInConfig() {
		String seedUrlSource = config.getSeed_Urls_From();

		if (SEED_URL_SOURCE.frontier.toString().equals(seedUrlSource)) {
			log.info("Getting Seed urls from Frontier Server");

			queue = getURLsFromFrontier();
			if (queue == null) {
				log.info("returned URLQueue is null in initCrawler");
				return null;
			}
		} else if (SEED_URL_SOURCE.local_XML_file.toString().equals(
				seedUrlSource)) {
			log.debug("Seed urls source is from XML file in tmp dir");

			queue = this.getURLsFromXMLFile();
			if (queue == null) {
				log.info("returned URLQueue is null.");
				return null;
			}
		} else {
			// Misconfigured option: fall back to the default source directly
			// instead of throwing UnknownConfigOption only to catch it on the
			// next line (exceptions are not control flow).
			log.warn("Seed URLs source is misconfigured");
			log.info("Geting URLs from default source, i.e Frontier Server");
			return getURLsFromFrontier();
		}
		return queue;
	}

	/**
	 * Requests seed URLs from the Frontier server. When the server returns
	 * nothing, retries the configured number of times with a 30 second pause
	 * between attempts, and terminates the JVM when all retries fail.
	 *
	 * @return the queue returned by the Frontier server, or null when the
	 *         host cannot be resolved or its URL is malformed
	 */
	private URLBeanQueue getURLsFromFrontier() {
		LocalClient client = new LocalClient(config);
		// read Frontier Host from config.
		URLBeanQueue tempqueue = null;
		HttpHost host;
		try {
			host = config.getFrontierHost();
			tempqueue = client.getUrlsFromFroniter();
			if (tempqueue == null) {
				log.warn("Looks like Frontier server is not running");
				// Retry n times (from config), sleeping 30s between attempts.
				for (int i = 0; i < config.getRetry_attempts_to_server(); i++) {
					log.debug("Trying to connect Frontier Server...");
					this.sleepCrawler(30);
					if (CheckHost.isPortOpen(host)) {
						log.info(host.toURI() + "is Now Reachable.");
						// Port is open again: fetch and return immediately.
						return client.getUrlsFromFroniter();
					}
				}
				log.error("Unable to Connect Frontier Server because, whether it is not running or not reachable");
				log.error("Exiting Crawler");
				// NOTE(review): hard exit from a helper method — callers never
				// observe this failure mode.
				System.exit(1);
			} else {
				log.debug("Frontier Server returned a Frontier of size "
						+ tempqueue.size());
			}
		} catch (UnknownHostException e) {
			log.error("Unknown Host exception throwed in getURLsFromFrontier",
					e);
		} catch (MalformedURLException e) {
			log.error("MalformedURLException throwed in getURLsFromFrontier", e);
		}
		return tempqueue;
	}

	/**
	 * Loads seed URLs from the XML file in the configured tmp directory.
	 * Falls back to the Frontier server when the file or the Tmp_dir setting
	 * is missing, retrying until the server returns a queue.
	 *
	 * @return the URL queue, or null when no source yielded URLs
	 */
	public URLBeanQueue getURLsFromXMLFile() {
		File xmlfile = null;
		URLBeanQueue tempqueue = null;

		if (config.getTmp_dir() != null) {
			xmlfile = new File(config.getTmp_dir() + "/"
					+ config.getXML_Filename());
			log.debug("Checking (" + config.getXML_Filename()
					+ ") existence in (" + config.getTmp_dir() + ") directory");
			if (xmlfile.exists()) {
				log.debug("xml file is found in " + config.getTmp_dir());

				// TODO: read the XML file and export its urls into tempqueue
				// (original left this branch unimplemented as well).

			} else {
				log.warn(config.getXML_Filename() + " is not found in "
						+ config.getTmp_dir());
				log.info("Trying get URLs from Default URL Source, Frontier Server");
				tempqueue = getURLsFromFrontier();
			}
		} else {
			log.warn("Tmp_dir entry in crawlercore.xml is empty.");
			try {
				log.debug("Trying to connect Frontier Server at "
						+ config.getFrontierHost().toURI());
			} catch (UnknownHostException e) {
				log.warn("Unknown host exception found in getURLSFromXMLFile. MESG="
						+ e.getMessage());
			}
			// BUG FIX: the original stored a successful fetch in 'queue' but
			// returned 'tempqueue' (still null), and its retry loop was
			// 'while (tempqueue != null)' — which never ran. Fetch into
			// tempqueue and retry while it IS null.
			tempqueue = this.getURLsFromFrontier();
			if (tempqueue == null) {
				log.info("returned URLQueue is null in initCrawler");
				log.info("Wating to get URLS from Frontier...");

				// retry requesting frontier Server, until it returns urls
				// (getURLsFromFrontier exits the JVM when the server stays down).
				while (tempqueue == null) {
					this.sleepCrawler(30);
					tempqueue = this.getURLsFromFrontier();
				}
			}
			// Preserve the original side effect of updating the shared queue field.
			queue = tempqueue;
		}
		return tempqueue;
	}

	/**
	 * Sleeps the calling thread for the given number of seconds.
	 *
	 * @param sec sleep duration in seconds
	 */
	public void sleepCrawler(long sec) {
		// Original named these variables backwards (timeinms held seconds and
		// timeinsec held milliseconds); one clearly named conversion instead.
		long millis = sec * 1000;
		log.info("Sleeping crawling for " + sec + "sec.");
		try {
			Thread.sleep(millis);
		} catch (InterruptedException e) {
			log.warn("Thread sleep intrupted exception MESG=" + e.getMessage());
			// Restore the interrupt flag so callers can observe the interruption.
			Thread.currentThread().interrupt();
		}
	}

	/**
	 * Builds an HttpGet request pointed at the Frontier server.
	 *
	 * @return the request object, or null when the frontier host cannot be
	 *         resolved
	 */
	public HttpGet instializeFSConnection() {
		HttpGet httpget;
		try {
			HttpHost host = config.getFrontierHost();
			httpget = new HttpGet(host.toURI());
		} catch (UnknownHostException e) {
			return null;
		}
		return httpget;
	}

	/**
	 * Placeholder: intended to check reachability of the Index Server and hand
	 * a connection object to the fetchURLs class. Currently always reports
	 * success — not implemented yet.
	 *
	 * @return true unconditionally (stub)
	 */
	public boolean instializeISConnection() {
		return true;
	}

	/**
	 * Placeholder: intended to check RAM status and reduce the thread count
	 * when memory is critically low. Currently a no-op stub.
	 */
	public void checkMemoryStatus() {
	}

	/**
	 * Checks the tmp directory for crawler.lock to decide whether the previous
	 * crawler instance was stopped cleanly.
	 *
	 * @return true when no lock file exists (clean stop); false when a stale
	 *         lock file indicates a forced stop
	 */
	public boolean isCleanStopped() {
		lockfile = new File(config.getTmp_dir() + "/crawler.lock");
		// An existing lock file means the previous instance was killed forcibly.
		return !lockfile.exists();
	}

	/**
	 * Placeholder: intended to probe the controller with dummy requests.
	 * Currently always reports success — not implemented yet.
	 *
	 * @return true unconditionally (stub)
	 */
	public boolean isControllerUP() {
		return true;
	}

	/** @return the active crawler configuration */
	public CrawlerConfig getConfig() {
		return config;
	}

	/** @param config the crawler configuration to use */
	public void setConfig(CrawlerConfig config) {
		this.config = config;
	}

}
