/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.openness.crawler.frontier;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.ZooKeeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.openness.crawler.crawler.Configurable;
import com.openness.crawler.crawler.CrawlConfig;
import com.openness.crawler.hbase.HBaseUtil;
import com.openness.crawler.hbase.Row;
import com.openness.crawler.lock.DistributedLock;
import com.openness.crawler.url.WebURL;
import com.openness.crawler.util.Constants;

/**
 * Assigns and resolves globally unique document ids (docids) for crawled URLs.
 *
 * <p>The url-to-docid mapping is persisted in an HBase table; the last assigned
 * docid is kept in a ZooKeeper node so that multiple crawler nodes share one
 * counter. All assignment paths are serialized through a {@link DistributedLock}.
 */
public class DocIDServer extends Configurable {

	private static final Logger LOGGER = LoggerFactory
			.getLogger(DocIDServer.class);

	// ZooKeeper handle holding the cluster-wide last-assigned-docid counter.
	private ZooKeeper zk;

	// Cluster-wide lock serializing docid lookup/assignment across nodes.
	// NOTE(review): getNewDocID/addUrlAndDocId previously re-acquired this lock
	// via getDocId while already holding it; if DistributedLock is not
	// reentrant that deadlocks. Internal lookups now bypass the lock.
	private DistributedLock lock;

	/**
	 * Creates the docid server.
	 *
	 * @param zk     live ZooKeeper connection used for the counter and the lock
	 * @param config crawl configuration; when resumable crawling is enabled the
	 *               counter is restored from the existing HBase table
	 * @throws Exception if the distributed lock cannot be created or the
	 *                   counter cannot be initialized
	 */
	public DocIDServer(ZooKeeper zk, CrawlConfig config) throws Exception {
		super(config);

		LOGGER.info("DocIDServer initialize");

		this.zk = zk;

		this.lock = new DistributedLock(zk, Constants.ZK_LOCK_DOCID,
				Constants.ZK_LOCK_PREFIX);

		if (config.isResumableCrawling()) {
			long docCount = getDocCount();

			if (docCount > 0) {
				LOGGER.info(
						"Loaded {} URLs that had been detected in previous crawl.",
						docCount);

				setLastDocID(docCount);
			}
		} else {
			setLastDocID(0);
		}

		LOGGER.info("DocIDServer initialize success");
	}

	/**
	 * Reads the last assigned docid from ZooKeeper.
	 *
	 * @throws RuntimeException wrapping any ZooKeeper failure (cause preserved)
	 */
	private long getLastDocID() {
		try {
			byte[] data = zk.getData(Constants.ZK_NODE_LASTDOCID, false, null);

			return Bytes.toLong(data);
		} catch (Exception e) {
			// Keep the original exception as the cause instead of flattening
			// it into the message string.
			throw new RuntimeException(
					"DocIDServer getLastDocID zk getData error", e);
		}
	}

	/**
	 * Persists the last assigned docid to ZooKeeper. Negative values are
	 * ignored (e.g. when getDocCount() reported an error with -1).
	 */
	private void setLastDocID(long lastDocID) {
		if (lastDocID < 0) {
			return;
		}

		try {
			zk.setData(Constants.ZK_NODE_LASTDOCID, Bytes.toBytes(lastDocID),
					-1);
		} catch (Exception e) {
			// Fixed copy-pasted message: this is setLastDocID/setData, the
			// original text claimed "getLastDocID zk getData".
			throw new RuntimeException(
					"DocIDServer setLastDocID zk setData error", e);
		}
	}

	/**
	 * Returns the docid of an already seen url.
	 *
	 * @param url
	 *            the URL for which the docid is returned.
	 * @return the docid of the url if it is seen before. Otherwise -1 is
	 *         returned.
	 */
	public long getDocId(String url) {
		lock.lock();

		try {
			return lookupDocId(url);
		} finally {
			lock.unlock();
		}
	}

	/**
	 * Looks up a url's docid in HBase WITHOUT taking the distributed lock.
	 * Callers must already hold {@link #lock}.
	 *
	 * @return the stored docid, or -1 if the url is unseen or the read failed
	 */
	private long lookupDocId(String url) {
		try {
			Map<String, byte[]> result = HBaseUtil.getRow(
					Constants.HBASE_TABLE_DOCIDSERVER, Bytes.toBytes(url));

			if (result != null && result.size() > 0) {
				return Bytes.toLong(result
						.get(Constants.HBASE_TABLE_DOCID_QUALIFIER_DOCID));
			}
		} catch (IOException e) {
			LOGGER.error("DocIDServer getDocId error", e);
		}

		return -1;
	}

	/**
	 * Batch variant of {@link #getDocId(String)}.
	 *
	 * @param urls urls to resolve
	 * @return one docid per input url, in order; -1 where the url is unseen
	 */
	public List<Long> getDocIds(List<WebURL> urls) {
		lock.lock();

		try {
			return lookupDocIds(urls);
		} finally {
			lock.unlock();
		}
	}

	/**
	 * Batch lookup WITHOUT taking the distributed lock. Callers must already
	 * hold {@link #lock}.
	 */
	private List<Long> lookupDocIds(List<WebURL> urls) {
		List<Long> results = new ArrayList<Long>(urls.size());

		List<byte[]> rows = new ArrayList<byte[]>(urls.size());

		for (int index = 0; index < urls.size(); index++) {
			results.add(-1L);

			rows.add(Bytes.toBytes(urls.get(index).getURL()));
		}

		try {
			List<Map<String, byte[]>> queryResults = HBaseUtil.getRows(
					Constants.HBASE_TABLE_DOCIDSERVER, rows);

			for (int index = 0; index < urls.size(); index++) {
				Map<String, byte[]> result = queryResults.get(index);

				if (result != null && result.size() > 0) {
					results.set(index, Bytes.toLong(result
							.get(Constants.HBASE_TABLE_DOCID_QUALIFIER_DOCID)));
				}
			}
		} catch (IOException e) {
			LOGGER.error("DocIDServer getDocIds error", e);
		}

		return results;
	}

	/**
	 * Returns the docid for the url, assigning and persisting a new one if the
	 * url has not been seen before.
	 *
	 * @return the (existing or newly assigned) docid, or -1 on storage error
	 */
	public long getNewDocID(String url) {
		lock.lock();

		try {
			// Lock-free lookup: we already hold the distributed lock here.
			long docid = lookupDocId(url);

			if (docid > 0) {
				return docid;
			}

			long lastDocID = getLastDocID() + 1;

			setLastDocID(lastDocID);

			HBaseUtil.putRow(Constants.HBASE_TABLE_DOCIDSERVER,
					Bytes.toBytes(url),
					Constants.HBASE_TABLE_DOCID_QUALIFIER_DOCID,
					Bytes.toBytes(lastDocID));

			return lastDocID;
		} catch (Exception e) {
			LOGGER.error("DocIDServer getNewDocID error", e);
		} finally {
			lock.unlock();
		}

		return -1;
	}

	/**
	 * Batch variant of {@link #getNewDocID(String)}: assigns fresh docids to
	 * all unseen urls.
	 *
	 * @param urls urls to assign docids for
	 * @return one entry per input url: the newly assigned docid for unseen
	 *         urls, -1 for urls that already had a docid (existing ids are NOT
	 *         returned here — use {@link #getDocIds(List)} for those)
	 */
	public List<Long> getNewDocIDs(List<WebURL> urls) {
		lock.lock();

		List<Long> results = new ArrayList<Long>(urls.size());

		for (int index = 0; index < urls.size(); index++) {
			results.add(-1L);
		}

		try {
			// Lock-free lookup: we already hold the distributed lock here.
			List<Long> docids = lookupDocIds(urls);

			long lastDocID = getLastDocID();

			for (int index = 0; index < urls.size(); index++) {
				if (docids.get(index) > 0) {
					continue;
				}

				results.set(index, ++lastDocID);
			}

			setLastDocID(lastDocID);

			List<Row> rows = new ArrayList<Row>();

			for (int index = 0; index < urls.size(); index++) {
				long assigned = results.get(index);

				if (assigned > 0) {
					Row row = new Row(Bytes.toBytes(urls.get(index).getURL()));

					// Persist the freshly assigned id. The original stored
					// urls.get(index).getDocid(), i.e. whatever (possibly
					// unset) docid the WebURL carried, losing the allocation.
					row.add(Constants.HBASE_TABLE_DOCID_QUALIFIER_DOCID,
							Bytes.toBytes(assigned));

					rows.add(row);
				}
			}

			HBaseUtil.putRows(Constants.HBASE_TABLE_DOCIDSERVER,
					rows.toArray(new Row[rows.size()]));
		} catch (Exception e) {
			LOGGER.error("DocIDServer getNewDocIDs error", e);
		} finally {
			lock.unlock();
		}

		return results;
	}

	/**
	 * Registers an externally chosen docid for a url (e.g. when seeding).
	 *
	 * @param url   the url to register
	 * @param docId requested docid; must be larger than the current counter
	 * @throws Exception if docId is not larger than the last assigned docid,
	 *                   or the url already has a different docid, or storage
	 *                   fails. (The original swallowed all of these in an
	 *                   empty catch, making the validation dead code.)
	 */
	public void addUrlAndDocId(String url, long docId) throws Exception {
		lock.lock();

		try {
			long lastDocID = getLastDocID();

			if (docId <= lastDocID) {
				throw new Exception("Requested doc id: " + docId
						+ " is not larger than: " + lastDocID);
			}

			// Make sure that we have not already assigned a docid for this URL
			long prevDocid = lookupDocId(url);

			if (prevDocid > 0) {
				if (prevDocid == docId) {
					return;
				}

				throw new Exception("Doc id: " + prevDocid
						+ " is already assigned to URL: " + url);
			}

			setLastDocID(docId);

			// Store the requested docId; the original wrote the stale
			// lastDocID value, mismatching the counter set just above.
			HBaseUtil.putRow(Constants.HBASE_TABLE_DOCIDSERVER,
					Bytes.toBytes(url),
					Constants.HBASE_TABLE_DOCID_QUALIFIER_DOCID,
					Bytes.toBytes(docId));
		} finally {
			lock.unlock();
		}
	}

	/**
	 * @return true if the url already has a docid assigned.
	 */
	public boolean isSeenBefore(String url) {
		return getDocId(url) != -1;
	}

	/**
	 * @return the highest docid stored in the HBase table, or -1 on error.
	 */
	public long getDocCount() {
		try {
			return HBaseUtil.getMaximumLongValueInFirstQualifier(
					Constants.HBASE_TABLE_DOCIDSERVER, null, null);
		} catch (Throwable e) {
			LOGGER.error("DocIDServer getDocCount error", e);
		}

		return -1;
	}

	/** No-op: state is persisted synchronously in ZooKeeper/HBase. */
	public void sync() {
	}

	/** No-op: the ZooKeeper handle is owned and closed by the caller. */
	public void close() {
	}

}
