package com.flute.icrawler.framework.framework;

import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

import com.flute.icrawler.framework.framework.container.AdditionalUrlInformation;
import com.flute.icrawler.framework.framework.container.AdditionalUrlQueue;
import com.flute.icrawler.framework.framework.container.NumberStringValue;
import com.flute.icrawler.framework.framework.container.AdditionalUrlInformation.Additionals;

/**
 * A crawl-URL pool whose waiting URLs are kept in a sorted
 * {@link AdditionalUrlQueue}. Each URL carries {@code Deep} (crawl depth)
 * and {@code Position} (insertion order) metadata; depth grows by one for
 * every link discovered from a parent URL.
 *
 * <p>URLs move between four string queues by lifecycle state:
 * waiting → processing → (succeeded | failed | retried-back-to-waiting).
 * All state-changing methods are {@code synchronized} on this instance.
 */
public class SortedCrawlUrlPool implements ICrawlUrlPool {

	private static final long serialVersionUID = -1637672714072796272L;

	/** URLs waiting to be crawled, ordered by the queue's own sort policy. */
	protected AdditionalUrlQueue waitQueue;
	/** URLs crawled successfully. NOTE: field name keeps the historical typo
	 *  ("sucessed") — it is protected and subclasses may reference it. */
	protected ConcurrentLinkedQueue<String> sucessedQueue;
	/** URLs that failed permanently. */
	protected ConcurrentLinkedQueue<String> failedQueue;
	/** URLs handed out by {@link #nextUrl()} and not yet finished. */
	protected ConcurrentLinkedQueue<String> processingQueue;
	/** URLs that have been re-queued at least once. Grows without bound and
	 *  is never read here — NOTE(review): confirm whether callers use it. */
	protected ConcurrentLinkedQueue<String> retriedQueue;
	/** The job this pool belongs to; set via {@link #setCrawlJob(CrawlJob)}. */
	protected CrawlJob job;

	/**
	 * Creates an empty pool and exports it as an RMI remote object.
	 * Seeds are loaded later, when {@link #setCrawlJob(CrawlJob)} is called.
	 */
	public SortedCrawlUrlPool() {
		this.waitQueue = new AdditionalUrlQueue();
		this.sucessedQueue = new ConcurrentLinkedQueue<String>();
		this.failedQueue = new ConcurrentLinkedQueue<String>();
		this.processingQueue = new ConcurrentLinkedQueue<String>();
		this.retriedQueue = new ConcurrentLinkedQueue<String>();
		// NOTE: the original constructor checked "if (job != null) initialize()",
		// but the job field is never assigned before this point, so that branch
		// was unreachable dead code and has been removed. Initialization happens
		// in setCrawlJob(CrawlJob) instead.

		try {
			// WARNING(review): exporting 'this' from the constructor lets the
			// partially-constructed object escape; export failure is only
			// logged (best-effort), leaving a pool that is not remotely
			// reachable. Preserved as-is to keep caller-visible behavior.
			UnicastRemoteObject.exportObject(this);
		} catch (RemoteException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Seeds the wait queue from the job's configured seed URLs,
	 * each at depth 1 with the next sequential position.
	 */
	private void initialize() {
		for (Seed seed : job.getSettings().getSeeds()) {
			CrawlUrl url = new CrawlUrl(job, seed.getUrl());
			AdditionalUrlInformation info = new AdditionalUrlInformation();
			info.updateValue(Additionals.Deep, new NumberStringValue(1));
			info.updateValue(Additionals.Position, new NumberStringValue(
					getTotalUrlCount() + 1));
			url.setAdditionalUrlInformation(info);
			waitQueue.addCrawlUrl(url);
		}
	}

	/**
	 * Attaches the crawl job and, if non-null, loads its seed URLs
	 * into the wait queue.
	 *
	 * @param job the job to attach; may be {@code null} (no seeding)
	 */
	public void setCrawlJob(CrawlJob job) {
		this.job = job;
		if (job != null) {
			initialize();
		}
	}

	/**
	 * Copies {@code source}'s metadata into a fresh info object, with depth
	 * increased by {@code deepIncrement} and the next sequential position.
	 * Shared by {@link #add(CrawlUrl, String)} (increment 1) and
	 * {@link #retry(CrawlUrl)} (increment 0).
	 */
	private AdditionalUrlInformation deriveInfo(CrawlUrl source, int deepIncrement) {
		AdditionalUrlInformation info = source.getAdditionalUrlInformation()
				.newInstance();
		NumberStringValue deep = (NumberStringValue) source
				.getAdditionalUrlInformation().getValue(Additionals.Deep);
		info.updateValue(Additionals.Deep, new NumberStringValue(deep
				.getNumber().intValue() + deepIncrement));
		info.updateValue(Additionals.Position, new NumberStringValue(
				getTotalUrlCount() + 1));
		return info;
	}

	/**
	 * Queues a newly discovered URL, one level deeper than the page it was
	 * found on.
	 *
	 * @param currentUrl the page the link was discovered on (depth source)
	 * @param url        the discovered URL to enqueue
	 */
	public synchronized void add(CrawlUrl currentUrl, String url) {
		CrawlUrl newUrl = new CrawlUrl(job, url);
		newUrl.setAdditionalUrlInformation(deriveInfo(currentUrl, 1));
		waitQueue.addCrawlUrl(newUrl);
	}

	/**
	 * Total URLs ever accepted: waiting + processing + succeeded + failed.
	 * Synchronized so the four-queue sum is consistent with concurrent
	 * mutators (retriedQueue is deliberately excluded — retried URLs are
	 * re-counted in waitQueue).
	 *
	 * @return the combined size of the four lifecycle queues
	 */
	public synchronized int getTotalUrlCount() {
		return waitQueue.size() + processingQueue.size() + sucessedQueue.size()
				+ failedQueue.size();
	}

	/**
	 * Reports whether the URL is already known to the pool in any
	 * lifecycle state (waiting, succeeded, failed, or processing).
	 */
	@Override
	public synchronized boolean contains(String url) {
		return waitQueue.containsUrl(url)
				|| sucessedQueue.contains(url)
				|| failedQueue.contains(url)
				|| processingQueue.contains(url);
	}

	/** @return the job attached via {@link #setCrawlJob}, or {@code null} */
	@Override
	public CrawlJob getCrawlJob() {
		return job;
	}

	/** @return {@code true} if at least one URL is waiting to be crawled */
	@Override
	public synchronized boolean hasNext() {
		return waitQueue.size() > 0;
	}

	/** @return {@code true} if at least one URL has been handed out and not finished */
	@Override
	public synchronized boolean isProcessing() {
		return processingQueue.size() > 0;
	}

	/**
	 * Removes and returns the highest-priority waiting URL, marking it as
	 * processing.
	 *
	 * @return the next URL to crawl, or {@code null} if the wait queue is empty
	 */
	@Override
	public synchronized CrawlUrl nextUrl() {
		CrawlUrl url = waitQueue.getFirstCrawlUrl();
		if (url != null) {
			processingQueue.add(url.getUrl());
		}
		return url;
	}

	/**
	 * Moves a URL from processing back to the wait queue for another attempt,
	 * keeping its depth but assigning a fresh position. The URL is also
	 * recorded in {@link #retriedQueue}.
	 */
	@Override
	public synchronized void retry(CrawlUrl url) {
		retriedQueue.add(url.getUrl());

		CrawlUrl newUrl = new CrawlUrl(job, url.getUrl());
		newUrl.setAdditionalUrlInformation(deriveInfo(url, 0));

		waitQueue.addCrawlUrl(newUrl);
		processingQueue.remove(url.getUrl());
	}

	/** Marks a processing URL as successfully crawled. */
	@Override
	public synchronized void success(CrawlUrl url) {
		sucessedQueue.add(url.getUrl());
		processingQueue.remove(url.getUrl());
	}

	/** Marks a processing URL as permanently failed. */
	@Override
	public synchronized void fail(CrawlUrl url) {
		failedQueue.add(url.getUrl());
		processingQueue.remove(url.getUrl());
	}

	/**
	 * Drops a filtered-out URL from the processing queue without recording
	 * it as success or failure. (Filter bookkeeping itself is unimplemented.)
	 */
	@Override
	public synchronized void filter(CrawlUrl url) throws RemoteException {
		processingQueue.remove(url.getUrl());
	}

	/**
	 * Re-queues an externally supplied URL, first binding it to this
	 * pool's job. No depth/position metadata is derived here — the caller's
	 * metadata is kept as-is.
	 */
	public synchronized void addUpdateUrl(CrawlUrl updateUrl) {
		updateUrl.setJob(job);
		this.waitQueue.addCrawlUrl(updateUrl);
	}

	/** Not supported by this implementation — intentionally a no-op. */
	@Override
	public void add(CrawlUrl url) throws RemoteException {
		// No-op: single-URL add without a parent is not supported here.
	}

	/**
	 * Not supported by this implementation.
	 *
	 * @return always {@code null}
	 */
	@Override
	public List<CrawlUrl> nextUrl(int n) throws RemoteException {
		return null;
	}

	/** Not supported by this implementation — intentionally a no-op. */
	@Override
	public void addAll(List<CrawlUrl> crawlUrls) throws RemoteException {
		// No-op: bulk add is not supported here.
	}

	/** Not supported by this implementation — intentionally a no-op. */
	@Override
	public void notityCrawlUrl(CrawlUrl url) throws RemoteException {
		// No-op: notification callback is not supported here.
	}

}
