/*
 *  Copyright 2009 Lucas Nazário dos Santos
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package net.sourceforge.retriever.frontier;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
 * <p>
 * A pool of URLs with FIFO behavior that does not allow URLs that have already
 * passed through the frontier to be reinserted.
 * </p>
 * 
 * <p>
 * Using this frontier makes the crawler operate in a breadth-first manner.
 * </p>
 */
public class PoliteFrontier implements Frontier {

	/** IPs that have queued URLs and are not currently being fetched. */
	private final List<String> freeIps = new LinkedList<String>();

	/** IPs whose URL batch has been handed out and is still being fetched. */
	private final List<String> lockedIps = new LinkedList<String>();

	/** Queued URLs grouped by the IP they resolve to. */
	private final Map<String, List<FrontierUrl>> urlsByIp = new HashMap<String, List<FrontierUrl>>();

	/**
	 * Politeness delay per IP, in milliseconds. The special key "all" holds the
	 * default applied to IPs without a specific entry. Guarded by this object's
	 * monitor (accessed from multiple threads).
	 */
	private final Map<String, Long> intervalBetweenFetchesInMillis = new HashMap<String, Long>();

	/**
	 * Adds a URL to the frontier, grouping it with other URLs that resolve to
	 * the same IP and stamping it with the configured politeness interval.
	 *
	 * @param frontierURL the URL to enqueue.
	 */
	public void enqueue(final FrontierUrl frontierURL) {
		final String ip = frontierURL.getIp();

		this.setIntervalBetweenFetches(frontierURL, ip);

		synchronized (this) {
			List<FrontierUrl> urls = this.urlsByIp.get(ip);

			if (urls == null) {
				urls = new LinkedList<FrontierUrl>();
				this.urlsByIp.put(ip, urls);
			}

			if (!this.lockedIps.contains(ip)) {
				this.add(this.freeIps, ip);
			}

			urls.add(frontierURL);

			// Wake up consumers blocked in dequeue(): a URL on a fresh IP may
			// have just made an IP available. Without this notification a
			// waiting consumer would never observe the new work.
			this.notifyAll();
		}
	}

	/**
	 * Removes and returns all URLs queued for the next free IP, locking that IP
	 * until {@link #notifyFetchedIp(String)} releases it. Blocks while every IP
	 * with pending URLs is locked; returns an empty list when the frontier has
	 * no URLs at all, or when the calling thread is interrupted while waiting.
	 *
	 * @return the URLs for one IP, or an empty list.
	 */
	public synchronized List<FrontierUrl> dequeue() {
		while (this.cantDequeue()) {
			try {
				this.wait();
			} catch (final InterruptedException e) {
				// Restore the interrupt status so callers can observe it, and
				// bail out instead of spinning back into wait().
				Thread.currentThread().interrupt();
				return new ArrayList<FrontierUrl>();
			}
		}

		if (this.hasFreeIPs()) {
			final String freeIp = this.freeIps.remove(0);
			this.add(this.lockedIps, freeIp);
			return this.urlsByIp.remove(freeIp);
		}

		return new ArrayList<FrontierUrl>();
	}

	/**
	 * Releases an IP previously handed out by {@link #dequeue()}. If new URLs
	 * arrived for it in the meantime, the IP becomes available again.
	 *
	 * @param ip the IP whose fetch has completed.
	 */
	public synchronized void notifyFetchedIp(final String ip) {
		this.lockedIps.remove(ip);

		if (this.urlsByIp.containsKey(ip)) {
			this.add(this.freeIps, ip);
		}

		this.notifyAll();			
	}

	/**
	 * Tells whether the frontier still holds URLs waiting to be dequeued.
	 *
	 * @return true if at least one IP has queued URLs.
	 */
	public synchronized boolean hasURLs() {
		return this.urlsByIp.size() > 0;
	}

	/**
	 * Discards every queued URL and all IP bookkeeping, returning the frontier
	 * to its initial state.
	 */
	public synchronized void reset() {
		this.freeIps.clear();
		this.lockedIps.clear();
		this.urlsByIp.clear();

		// Wake blocked consumers: with the frontier empty, cantDequeue() is
		// now false and they should return instead of waiting forever.
		this.notifyAll();
	}

	/**
	 * Sets the default politeness interval applied to every IP that has no
	 * specific interval configured.
	 *
	 * @param time the interval between fetches, in milliseconds.
	 */
	public synchronized void setIntervalBetweenFetchesInMillis(final long time) {
		this.intervalBetweenFetchesInMillis.put("all", time);
	}

	/**
	 * Sets the politeness interval for one specific IP, overriding the default.
	 *
	 * @param ip   the IP the interval applies to.
	 * @param time the interval between fetches, in milliseconds.
	 */
	public synchronized void setIntervalBetweenFetchesInMillis(final String ip, final long time) {
		this.intervalBetweenFetchesInMillis.put(ip, time);
	}

	/**
	 * Stamps the URL with the interval configured for its IP, falling back to
	 * the "all" default; leaves the URL untouched if neither is configured.
	 */
	private synchronized void setIntervalBetweenFetches(final FrontierUrl frontierURL, final String ip) {
		Long time = this.intervalBetweenFetchesInMillis.get(ip);
		if (time == null) {
			time = this.intervalBetweenFetchesInMillis.get("all");
		}

		if (time != null) {
			frontierURL.setTimeToSleepInMillis(time);
		}
	}

	/** Adds the IP to the list only if it is not already present. */
	private void add(final List<String> ips, final String ip) {
		if (!ips.contains(ip)) ips.add(ip);
	}

	/** A consumer must wait when no IP is free but locked IPs still hold URLs. */
	private boolean cantDequeue() {
		return this.hasNotFreeIPs() && this.willHaveFreeIPsInTheFuture();
	}

	private boolean hasFreeIPs() {
		return this.freeIps.size() > 0;
	}

	/** True while some IP (possibly locked) still has queued URLs. */
	private boolean willHaveFreeIPsInTheFuture() {
		return this.urlsByIp.size() > 0;
	}

	private boolean hasNotFreeIPs() {
		return this.freeIps.size() <= 0;
	}
}