/* MarkusCrawler
 *
 * Copyright (C) 2011 by Markus '[mackan]' Hyttinen <mackanhyttinen@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 */

package me.crawler;

import java.net.*;
import java.util.*;

/**
 * Thread-safe FIFO work queue for a crawler.
 *
 * <p>Pages are deduplicated by URL: a page is only enqueued if its URL has not
 * been seen before, and enqueuing stops once the configured link or depth
 * limits are reached ({@code -1} means unlimited).
 *
 * <p>Thread-safety: all state is guarded by this instance's monitor
 * (synchronized methods). The sets returned by {@link #getGatheredElements()}
 * and {@link #getProcessedLinks()} are the live internal sets; iterating them
 * while other threads push is not safe.
 */
public class CrawlerQueue {

	/** FIFO queue of pages waiting to be processed. */
	protected Queue<CrawlerHTMLPage> q;
	/** URLs discovered/enqueued at least once; used for deduplication. */
	protected Set<String> retrievedLinks;
	/** URLs that have been processed. NOTE(review): never written in this class — presumably populated by callers through getProcessedLinks(); confirm. */
	protected Set<String> processedLinks;
	/** NOTE(review): unused in this class; kept because protected fields are visible to subclasses. */
	protected Integer interval;
	/** Count of pages marked processed via proccessed(). */
	protected Integer process;
	/** Count of pages successfully enqueued (new URL, within limits). */
	protected Integer gathered;
	/** Maximum number of links to gather; -1 means unlimited. */
	protected int maxLinks;
	/** Maximum crawl depth accepted by push(); -1 means unlimited. */
	protected int maxDepth;

	/** Creates an empty queue with no link or depth limits. */
	public CrawlerQueue() {
		q = new LinkedList<CrawlerHTMLPage>();
		retrievedLinks = new HashSet<String>();
		processedLinks = new HashSet<String>();
		maxLinks = -1;
		maxDepth = -1;
		process = 0;
		gathered = 0;
	}

	/**
	 * @return the maximum crawl depth, or -1 if unlimited
	 */
	public synchronized int getMaxDepth() {
		return maxDepth;
	}

	/**
	 * @param maxDepth the maximum crawl depth to set (-1 for unlimited)
	 */
	public synchronized void setMaxDepth(int maxDepth) {
		this.maxDepth = maxDepth;
	}

	/**
	 * @return the maximum number of links to gather, or -1 if unlimited
	 */
	public synchronized int getMaxLinks() {
		return maxLinks;
	}

	/**
	 * Sets the maximum number of links to gather.
	 * Synchronized for consistency with the other accessors of {@code maxLinks}.
	 *
	 * @param _maxLinks the new limit (-1 for unlimited)
	 */
	public synchronized void setMaxLinks(int _maxLinks) {
		maxLinks = _maxLinks;
	}

	/**
	 * Returns the live set of gathered (deduplicated) URLs.
	 * Callers must not iterate it while other threads push.
	 *
	 * @return the internal set of retrieved links (not a copy)
	 */
	public synchronized Set<String> getGatheredElements() {
		return retrievedLinks;
	}

	/**
	 * Returns the live set of processed URLs.
	 *
	 * @return the internal set of processed links (not a copy)
	 */
	public synchronized Set<String> getProcessedLinks() {
		return processedLinks;
	}

	/**
	 * Increments the processed-page counter.
	 * NOTE(review): method name is a typo ("proccessed") but is kept for
	 * backward compatibility with existing callers.
	 */
	public synchronized void proccessed() {
		process++;
	}

	/**
	 * @return the number of pages marked processed via {@link #proccessed()}
	 */
	public synchronized int getProcessedSize() {
		return process;
	}

	/**
	 * @return the number of pages successfully enqueued so far
	 */
	public synchronized int getGatheredSize() {
		return gathered;
	}

	/**
	 * @return the number of pages currently waiting in the queue
	 */
	public synchronized int size() {
		return q.size();
	}

	/**
	 * Raises the link limit by one, unless the queue is unlimited.
	 * Synchronized: was previously unguarded while every reader of
	 * {@code maxLinks} held the monitor.
	 */
	public synchronized void incrMaxLinks() {
		if (maxLinks != -1) {
			maxLinks++;
		}
	}

	/**
	 * Removes and returns the head of the queue.
	 *
	 * @return the next page, or {@code null} if the queue is empty
	 */
	public synchronized CrawlerHTMLPage pop() {
		// Queue.poll() already returns null on an empty queue.
		return q.poll();
	}

	/**
	 * Enqueues a page unless a limit is exceeded or its URL was seen before.
	 * Pages with unparseable URLs are dropped silently (best-effort crawl).
	 *
	 * @param page the page to enqueue
	 */
	public synchronized void push(CrawlerHTMLPage page) {
		// Reject when either configured limit would be exceeded (-1 = unlimited).
		if ((maxLinks != -1 && maxLinks <= retrievedLinks.size())
				|| (maxDepth != -1 && maxDepth < page.getDepth())) {
			return;
		}
		try {
			// Set.add returns true only for URLs we have not seen before.
			if (retrievedLinks.add(page.getUrl().toURI().toString())) {
				gathered++;
				q.add(page);
			}
		} catch (URISyntaxException ignored) {
			// Malformed URL: skip this page rather than abort the crawl.
		}
	}

	/**
	 * @return {@code true} if no pages are waiting in the queue
	 */
	public synchronized boolean isEmpty() {
		return q.isEmpty();
	}

	/**
	 * @return {@code true} if the gathered-link count has reached the limit
	 *         (always {@code false} when the limit is -1/unlimited)
	 */
	public synchronized boolean full() {
		return maxLinks != -1 && retrievedLinks.size() >= maxLinks;
	}

	/**
	 * Returns a copy of this queue containing the same queued pages.
	 * NOTE(review): limits, counters and the processed-link set are NOT
	 * copied — the clone starts with default settings and re-registers each
	 * queued page's URL via push(); this preserves the original behavior.
	 */
	@Override
	public synchronized CrawlerQueue clone() {
		CrawlerQueue copy = new CrawlerQueue();
		for (CrawlerHTMLPage page : q) {
			copy.push(page);
		}
		return copy;
	}

	/** Removes all pending pages; the link sets and counters are untouched. */
	public synchronized void clear() {
		q.clear();
	}

}
