
/*
 * 
 * Main core of the crawler application: it starts all worker threads, shuts them
 * all down, and is the last thread to die. All inter-component communication goes
 * through its methods.
 * 
 */

package core;

import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.util.Properties;

import console.Console;

import jobs.*;
import managers.*;
import database.ImageHandler;
import database.SqlManager;
import java.util.LinkedList;


/**
 * Main core of the crawler application: starts all worker threads, waits for
 * them to finish, and is the last thread to die. All inter-component
 * communication goes through this class's methods.
 *
 * <p>Implemented as a lazily initialized singleton; {@link #getInstance()} is
 * synchronized so concurrent first calls cannot create two instances.
 */
public class Crawler extends Thread 
{
	private static Crawler mInstance;
	
	private volatile boolean mWorkingOnSite = false; // true while a crawl session is active (NOTE(review): no visible writer sets this true in this file — confirm against the managers)
	private String mCurrentWebsite; // scheme + authority of the website currently being crawled
	
	private FoundPagesManager mFpm; // manages found page URLs and marks them for visiting if they haven't been yet
	private AcceptedPagesManager mApm; // manages pages that need to be visited
	private ImageHandler mFim; // manages found image URLs and indexes them if they haven't been yet
	

	/** Application entry point: boots the singleton with the default config file. */
	public static void main(String[] args)
	{
		getInstance().initialize("config.ini");
	}
	
	/**
	 * Returns the single {@code Crawler} instance, creating it on first use.
	 * Synchronized so that concurrent first callers cannot observe a partially
	 * constructed instance or create two of them.
	 */
	public static synchronized Crawler getInstance()
	{
		if (mInstance == null)
		{
			mInstance = new Crawler();
		}
		return mInstance;
	}
	
	private Crawler() 
	{
		// Singleton: construct only through getInstance().
	}
	
	/**
	 * Loads configuration, connects to the database and starts all worker threads.
	 *
	 * <p>Recognized properties: {@code db_host}, {@code db_name}, {@code db_user},
	 * {@code db_password} and {@code threads} (crawler worker count). Missing keys
	 * fall back to built-in defaults; a malformed {@code threads} value is
	 * reported and ignored rather than aborting startup.
	 *
	 * @param configFile path of a {@link Properties}-format configuration file
	 * @return {@code true} on success; {@code false} if the config file could not
	 *         be read or the database connection failed
	 */
	public boolean initialize( String configFile )
	{
		System.out.println("Initializing...");
		Properties configFileHandle = new Properties();
		
		/* default values */
		String dbHost = "localhost";
		String dbName = "killerpixel";
		String dbUsername = "root";
		String dbPassword = "";
		
		int foundPagesWorkers = Config.VALIDATION_THREADS;
		int acceptedPagesWorkers = Config.CRAWLER_THREADS;
		/* END default values */
		
		/* overwrite defaults with values from the config file */
		
		// try-with-resources: the stream is closed even if load() throws,
		// which the original explicit close() did not guarantee.
		try (FileInputStream in = new FileInputStream( configFile ))
		{
			configFileHandle.load(in);
		}
		catch (IOException e)
		{
			System.out.println("Could not read config file " + configFile + "!");
			return false;
		}
		
		// Two-arg getProperty keeps the default when the key is absent.
		dbHost = configFileHandle.getProperty("db_host", dbHost);
		dbName = configFileHandle.getProperty("db_name", dbName);
		dbPassword = configFileHandle.getProperty("db_password", dbPassword);
		dbUsername = configFileHandle.getProperty("db_user", dbUsername);
		
		String threadsValue = configFileHandle.getProperty("threads");
		if (threadsValue != null)
		{
			try
			{
				acceptedPagesWorkers = Integer.parseInt( threadsValue );
			}
			catch (NumberFormatException e)
			{
				// A bad value should not crash startup; keep the default instead.
				System.out.println("Invalid 'threads' value \"" + threadsValue
						+ "\", using default " + acceptedPagesWorkers);
			}
		}
		
		/* end config load */
		
		/* other initializations */
		mFpm = new FoundPagesManager( foundPagesWorkers, 30 ); 
		mApm = new AcceptedPagesManager( acceptedPagesWorkers, 10 );
		mFim = ImageHandler.getInstance();
		SeedHandler.getInstance();
		
		if (!SqlManager.getInstance().connect(dbHost, dbName, dbUsername, dbPassword))
		{
			System.out.println("Failed to connect to Mysql database, stopping ...");
			return false;
		}
		
		// Start workers only after the database connection is confirmed.
		mFpm.start();
		mApm.start();
		mFim.start();
		SeedHandler.getInstance().start();
		
		this.start();
		
		return true;
	}
	
	/**
	 * Thread body: starts the console and blocks until every manager thread has
	 * terminated, then reports shutdown.
	 */
	@Override
	public void run ()
	{
		System.out.println("Initialized.");
		Console.getInstance().start();
		try 
		{
			mFpm.join();
			mApm.join();
			mFim.join();
		}
		catch (InterruptedException e) 
		{
			// Restore the interrupt flag so callers can observe the interruption.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
		System.out.println("Stopped.");
	}
	
	/** Stops the crawler and all of its worker threads. */
	public void stopProcess ()
	{
		Console.getInstance().stopProcess();
		mFpm.stopProcess();
		mApm.stopProcess();
		ImageHandler.getInstance().stopProcess();
		SeedHandler.getInstance().stopProcess();
		
	}
	
	/* JOB ASSIGN METHODS */
	
	/**
	 * Adds a URL found while crawling. If it belongs to the current session's
	 * website it is queued for validation; otherwise it is handed to the seed
	 * handler as a potential future seed.
	 *
	 * @param pageURL the discovered URL
	 * @return {@code true} if a validation job was queued, {@code false} if the
	 *         URL belonged to a different site or the queue rejected it
	 */
	public boolean addFoundPageUrl( URI pageURL )
	{
		String url = pageURL.getScheme() + "://" + pageURL.getAuthority();
		if (!url.equals(mCurrentWebsite))
		{
			addPotentialSeed(pageURL);
			return false;
		}
		
		return mFpm.addJob( new ValidatePageJob( pageURL ) );
	}
	
	/**
	 * Queues a validated URL of the current website for crawling, unless the
	 * per-website job budget ({@code Config.MAX_TRIES_ON_WEBSITE}) is exhausted.
	 *
	 * @return {@code true} if the crawl job was queued
	 */
	public boolean addAcceptedPageUrl( URI pageURL )
	{
		if (CrawlPageJob.countJobs < Config.MAX_TRIES_ON_WEBSITE)
		{
			return mApm.addJob( new CrawlPageJob( pageURL ) );
		}
		return false;
	}
	
	/**
	 * Adds image details to the image handler queue; queued images are stored in
	 * the database.
	 *
	 * @return {@code true} if the image was accepted by the handler
	 */
	public boolean addFoundImageUrl( URI pageURL, URI imageURL, String pageTitle, String mimetype, int size, String sitelink )
	{
		return mFim.addImage( pageURL, imageURL, pageTitle, mimetype, size, sitelink );
	}
	
	/**
	 * Adds a potential seed to the validate queue inside {@code SeedHandler};
	 * if valid, it will become a seed.
	 */
	public void addPotentialSeed(URI url)
	{
		SeedHandler.getInstance().addValidateUrl(url);
	}
	
	/**
	 * Adds a seed directly to the seed queue without validating it first; this
	 * may overwrite the queue limit. If no session is running, one is started
	 * immediately.
	 */
	public void addForcedSeed(URI url)
	{
		SeedHandler.getInstance().addForcedSeed(url);
		
		if ( !mWorkingOnSite )
		{
			startWorkonSeed();
		}
	}
	
	/* JOBS END */
	
	/* Session control for the crawler, these methods make sure the crawler handles only one website at a time (scheme + authority) */
	
	/**
	 * Sets the current session's website (scheme + authority of {@code url}).
	 * Ignored while a session is already in progress.
	 */
	public void setCurrentWebsite(URI url)
	{
		if ( !mWorkingOnSite )
		{
			mCurrentWebsite = url.getScheme() + "://" + url.getAuthority();
		}
	}
	
	/**
	 * Returns the website currently being crawled, or {@code null} if no session
	 * has been started yet (the original threw a NullPointerException in that case).
	 */
	public String getCurrentWebsite()
	{
		return mCurrentWebsite;
	}
	
	/**
	 * Takes one seed from the seed queue and starts a new crawler session on it.
	 *
	 * @return {@code true} if a new session was started; {@code false} if a
	 *         session is already running or no seed was available
	 */
	public boolean startWorkonSeed()
	{
		if ( !mWorkingOnSite )
		{
			URI seed = SeedHandler.getInstance().getSeed();
			if (seed == null)
			{
				return false;
			}
			
			// Fresh session: clear visited links, record the new site and reset
			// the per-website job counter before queueing the first crawl job.
			SqlManager.getInstance().deleteVisitedLinks();
			setCurrentWebsite(seed);
			SqlManager.getInstance().addVisitedSite(seed);
			CrawlPageJob.countJobs = 0;
			addAcceptedPageUrl(seed);
			
			return true;
		}
		
		return false;
	}
	
	/** Notifies the Accepted Pages Manager (main crawler) of a queue update. */
	public void notifyAcceptedPagesManager()
	{
		mApm.notifyUpdate();
	}
	
	/* Session control end */
		
	/* Count Methods, useful for statistics and other */
	
	/** @return number of crawl workers currently busy */
	public int getBusyCrawlWorkerCount ()
	{
		return mApm.getBusyWorkerCount();
	}
	
	/** @return total number of crawl workers */
	public int getCrawlWorkerCount ()
	{
		return mApm.getWorkerCount();
	}
	
	/** @return current size of the crawl queue */
	public int getCrawlQueueSize()
	{
		return mApm.getQueueSize();
	}
	
	/** @return maximum size of the crawl queue */
	public int getCrawlQueueMaxSize()
	{
		return mApm.getQueueMaxSize();
	}
	
	/** @return current size of the found-pages queue */
	public int getFoundQueueSize()
	{
		return mFpm.getQueueSize();
	}
	
	/** @return maximum size of the found-pages queue */
	public int getFoundQueueMaxSize()
	{
		return mFpm.getQueueMaxSize();
	}
	
	/** @return current size of the image queue */
	public int getImageQueueSize()
	{
		return mFim.getQueueSize();
	}
	
	/** @return total number of images indexed this session */
	public int getTotalImagesThisSession()
	{
		return mFim.getTotalImages();
	}
}
