/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.googlecode.crawlerextractor.crawler4j.frontier;

import java.util.List;

import org.apache.log4j.Logger;

import com.googlecode.crawlerextractor.crawler4j.crawler.Configurable;
import com.googlecode.crawlerextractor.crawler4j.crawler.CrawlConfig;
import com.googlecode.crawlerextractor.crawler4j.service.DocIdService;
import com.googlecode.crawlerextractor.crawler4j.service.WebURLService;
import com.googlecode.crawlerextractor.crawler4j.url.WebURL;

/**
 * @author Yasser Ganjisaffar <lastname at gmail dot com>
 */

/**
 * Thread-safe frontier of the crawl: accepts discovered URLs ({@link #schedule},
 * {@link #scheduleAll}) and hands batches of pending URLs to worker threads
 * ({@link #getNextURLs}). Worker threads block on {@code waitingList} while the
 * queue is empty and are woken when new URLs arrive or the crawl is finished.
 */
public class Frontier extends Configurable {

	protected static final Logger logger = Logger.getLogger(Frontier.class
			.getName());
	protected final WebURLService webURLService = WebURLService.getInstance();
	protected final DocIdService docIdService = DocIdService.getInstance();
	protected WorkQueues workQueues;

	// Guards scheduledPages and queue mutations.
	protected final Object mutex = new Object();
	// Monitor that idle consumer threads park on in getNextURLs().
	protected final Object waitingList = new Object();

	// volatile: read without holding mutex in getNextURLs() (after wait) and
	// in isFinished(), so writes from finish() must be visible across threads.
	protected volatile boolean isFinished = false;

	// Total number of URLs accepted so far; compared against
	// config.getMaxPagesToFetch() (negative value means "unlimited").
	protected long scheduledPages;

	protected DocIDServer docIdServer;

	/**
	 * Creates the frontier. When resumable crawling is enabled, previously
	 * in-progress URLs are restored; otherwise all persisted doc-ids and URLs
	 * are wiped and the page counter starts from zero.
	 *
	 * @param config      crawl configuration (max pages, resumability, ...)
	 * @param docIdServer server assigning document ids to URLs
	 */
	public Frontier(final CrawlConfig config, final DocIDServer docIdServer) {
		super(config);

		this.docIdServer = docIdServer;

		workQueues = new WorkQueues();
		if (config.isResumableCrawling()) {
			webURLService.restoreURL();
		} else {
			logger.info("Clear All DocID");
			docIdService.clearAll();
			logger.info("Clear All WebURL");
			webURLService.clearAll();
			scheduledPages = 0;
		}

	}

	/**
	 * Marks the crawl as finished and wakes every thread blocked in
	 * {@link #getNextURLs} so it can observe the flag and return.
	 */
	public void finish() {
		isFinished = true;
		synchronized (waitingList) {
			waitingList.notifyAll();
		}
	}

	/**
	 * Fetches up to {@code max} pending URLs into {@code result}, blocking
	 * until at least one URL is available or the crawl is finished.
	 *
	 * @param max    maximum number of URLs to fetch in one batch
	 * @param result output list the fetched URLs are appended to
	 */
	public void getNextURLs(final int max, final List<WebURL> result) {
		while (true) {
			synchronized (mutex) {
				if (isFinished) {
					return;
				}
				final List<WebURL> curResults = webURLService.getNextURL(max);

				result.addAll(curResults);

				if (result.size() > 0) {
					return;
				}
			}
			// NOTE(review): there is a window between releasing mutex and
			// entering this block where a notifyAll() can be missed; the
			// surrounding while-loop re-checks the queue after every wakeup,
			// so this only delays, not loses, work.
			try {
				synchronized (waitingList) {
					waitingList.wait();
				}
			} catch (final InterruptedException ignored) {
				// Preserve the interrupt status for callers higher up.
				Thread.currentThread().interrupt();
			}
			if (isFinished) {
				return;
			}
		}
	}

	/**
	 * @return number of URLs currently queued, as reported by the URL service
	 */
	public long getQueueLength() {
		return webURLService.getCountQueued();
	}

	/**
	 * @return true once {@link #finish()} has been called
	 */
	public boolean isFinished() {
		return isFinished;
	}

	/**
	 * Schedules a single URL for crawling, unless the configured page limit
	 * has already been reached. A negative limit means "unlimited".
	 *
	 * @param url the URL to enqueue
	 */
	public void schedule(final WebURL url) {
		final int maxPagesToFetch = config.getMaxPagesToFetch();
		synchronized (mutex) {

			if (maxPagesToFetch < 0 || scheduledPages < maxPagesToFetch) {
				workQueues.put(url);
				scheduledPages++;

			}
			// Only meaningful when a limit is configured; with a negative
			// (unlimited) limit the comparison would always be true.
			if (maxPagesToFetch >= 0 && scheduledPages >= maxPagesToFetch) {
				logger.debug("Max Pages To Fetch done : " + scheduledPages);
			}
			// Wake any consumer blocked in getNextURLs(); scheduleAll() does
			// the same, and the waiter's loop tolerates spurious wakeups.
			synchronized (waitingList) {
				waitingList.notifyAll();
			}

		}
	}

	/**
	 * Schedules a batch of URLs, stopping early once the configured page
	 * limit would be exceeded, then wakes waiting consumer threads.
	 *
	 * @param urls the URLs to enqueue
	 */
	public void scheduleAll(final List<WebURL> urls) {
		final int maxPagesToFetch = config.getMaxPagesToFetch();
		synchronized (mutex) {
			int newScheduledPage = 0;
			for (final WebURL url : urls) {
				if (maxPagesToFetch > 0
						&& (scheduledPages + newScheduledPage) >= maxPagesToFetch) {
					break;
				}

				workQueues.put(url);
				newScheduledPage++;
			}

			if (newScheduledPage > 0) {
				scheduledPages += newScheduledPage;

			}
			// Only meaningful when a limit is configured (see schedule()).
			if (maxPagesToFetch > 0 && scheduledPages >= maxPagesToFetch) {
				logger.debug("Max Pages To Fetch done : " + scheduledPages);
			}
			synchronized (waitingList) {
				waitingList.notifyAll();
			}
		}

	}

	/**
	 * Marks a URL as fully processed in the backing URL service.
	 *
	 * @param webURL the URL whose processing has completed
	 */
	public void setProcessed(final WebURL webURL) {

		webURLService.endURL(webURL);

	}
}
