
/*
 * Main Manager: spawns Workers and assigns CrawlPageJob instances to them.
 * 
 */
package managers;

import jobs.CrawlPageJob;
import core.Crawler;
import workers.BaseWorker;

public class AcceptedPagesManager extends BaseManager 
{
	/**
	 * Creates the manager, sizes its job queue, and spawns the worker pool.
	 *
	 * @param numWorkers      number of BaseWorker instances to create
	 * @param queueMultiplier factor used to size the internal job queue
	 *                        (passed through to BaseManager)
	 */
	public AcceptedPagesManager(int numWorkers, int queueMultiplier)
	{
		super("Accepted Pages Manager");

		setQueueSizeMultiplier(queueMultiplier);

		int workerIndex = 0;
		while (workerIndex < numWorkers)
		{
			addWorker(new BaseWorker("Accepted Pages Worker #" + workerIndex));
			workerIndex++;
		}
	}

	/**
	 * Hook invoked by BaseManager when no job is available, just before the
	 * manager goes to sleep. Detects that work on the current site is about
	 * to end so the crawler can start on the next seed.
	 */
	@Override
	protected void beforeGoingToSleepNoJob()
	{
		final Crawler crawler = Crawler.getInstance();

		/* Found-URLs queue is empty: the current session is winding down,
		 * so kick off work on the next seed. */
		if (crawler.getFoundQueueSize() == 0)
		{
			crawler.startWorkonSeed();
		}
	}
}


