/*
 * @(#)ODP.java	0.1 04/02/06
 *
 * Copyright 2006 Fabio Gasparetti. All rights reserved.
 */

package org.tabularium.text.se.odp;

import java.io.*;
import java.util.*;
import java.util.logging.*;
import java.util.zip.GZIPInputStream;
import java.net.URL;
import org.tabularium.text.se.*;
import org.tabularium.net.downloader.*;
import org.tabularium.net.UrlNormalizer;
import org.tabularium.net.linkdb.LinkDb;
import org.tabularium.net.linkdb.UrlIterator;
import org.tabularium.text.html.*;
import org.tabularium.util.ProgressMonitor;
import org.tabularium.util.XmlPreferences;

/**
 * Provides basic functionalities to manage a DMOZ RDF dump: loading it into an
 * ODPStorer, downloading the linked pages, indexing them, and querying the
 * topic hierarchy.
 * 
 * loadODP downloads/parses the dump and populates the index.
 */
public class ODP implements ProgressMonitor {
	class Listener implements org.tabularium.net.downloader.DownloaderListener {
		protected RawStorer storer;
		int storedResources = 0;

		protected Logger logger = Logger.getLogger(Listener.class.getName());

		public Listener(RawStorer storer) {
			this.storer = storer;
		}

		public void downloadCompleted(DownloadEvent parm1) {
			/** @todo think about escaping urls */
			Resource res = parm1.getResource();
			String ct = res.getContentType();
			if (("text/html".equalsIgnoreCase(ct))
					|| ("text/plain".equalsIgnoreCase(ct))) {
				try {
					logger.info("Store page: " + res.getURL());
					// System.out.println(new String(res.getObject()));
					String url = res.getURL();
					// normalize
					// url = UrlNormalizer.normalize(url);
					storer.store(url, res.getObject(),0);
					storedResources++;
				} catch (Exception ex) {
					logger.severe("RawStorer exception: " + ex.toString());
				}
			}
			// gui
		}

		public void error(DownloadEvent parm1) {
		}

		public void queueEmpty(DownloadEvent parm1) {
		}
	}

	// default values
	// public static final int MAX_DOWNLOAD_THREADS = 2;
	//
	// public static final int MAX_ATTEMPTS = 10;
	//
	// public static final int MAX_RESOURCE_SIZE = 100000;
	//
	// public static final int TIME_OUT = 10000;
	//
	// Backing store holding the ODP topics and entries (e.g. MySQL-based).
	private ODPStorer storer;

	// private RawStorer rawPageStorer = null;

	// Path of the last RDF dump passed to loadODP(); null until first load.
	private String filename = null;

	private Logger logger = Logger.getLogger(ODP.class.getName());

	// ProgressMonitor state: progress counter updated by long-running tasks.
	// NOTE(review): some tasks store negative values here (see loadODP and
	// download) — confirm how callers interpret the sign convention.
	private long currentProgress = 0l;
	// ProgressMonitor state: true when no long-running task is in progress.
	private boolean taskDone = true;

	/**
	 * Creates an ODP facade backed by the given storer. The storer must be
	 * initialized by the caller before use.
	 */
	public ODP(ODPStorer odpStorer) {
		this.storer = odpStorer;
		// this.rawPageStorer = rawPageStorer;
		// downloaderListener = new DownloaderListener(rawPageStorer, false);
	}

	/**
	 * Stores all the topics and related links within maxLevel depth. If
	 * maxLevel is 0 (or negative), it loads all the entries.
	 * 
	 * Note: the storer will be cleared first.
	 * Note: currentProgress is updated while parsing, one step per
	 * ExternalPage element encountered.
	 * 
	 * @param fn path of the gzipped DMOZ RDF dump
	 * @param maxLevel maximum topic depth to keep; &lt;= 0 means unlimited
	 */
	public synchronized void loadODP(String fn, int maxLevel) throws Exception {
		this.loadODP(fn, maxLevel, false, false);
	}

	/**
	 * Same as loadODP(String fn, int maxLevel) but with optional filtering of
	 * non-English web sites, e.g.,
	 * 
	 * Top/Adult/World/Francais/Arts/Ecriture_en_ligne
	 * Top/World/Afrikaans/Besigheid/Uitgewers_en_Boeke
	 * 
	 * and optional compression of single-character (alphabetic or numeric)
	 * category segments, e.g.,
	 * 
	 * Top/Adult/Arts/Celebrities/A
	 * Top/Adult/Shopping/Clothing/Lingerie/1
	 * 
	 * @param fn path of the gzipped DMOZ RDF dump
	 * @param maxLevel maximum topic depth to keep; &lt;= 0 means unlimited
	 * @param filterNoEnglish when true, skips entries under ".../World/..." topics
	 * @param compress when true, collapses single-character category segments
	 **/
	public synchronized void loadODP(String fn, int maxLevel,
			boolean filterNoEnglish, boolean compress) throws Exception {

		currentProgress = 0l;
		taskDone = false;
		if (maxLevel <= 0)
			maxLevel = Integer.MAX_VALUE;
		// reset the storer
		storer.clear();
		filename = fn;
		FileInputStream is = new FileInputStream(filename);
		try {
			GZIPInputStream gzis = new GZIPInputStream(is);
			InputStreamReader isr = new InputStreamReader(gzis, "UTF8");
			BufferedReader reader = new BufferedReader(isr);
			String line = null;
			ODPEntry entry = null;
			// line-oriented scan of the RDF dump: an <ExternalPage about="...">
			// opens an entry, child tags fill it, </ExternalPage> commits it
			while ((line = reader.readLine()) != null) {
				int i, j;
				if ((i = line.indexOf(" about=\"")) > 0) {
					entry = new ODPEntry();
					j = line.indexOf("\">", i + 8);
					if (j == -1)
						continue;
					entry.link = line.substring(i + 8, j);
				} else if (line.indexOf("</ExternalPage>") >= 0) {
					// guard against malformed input: a closing tag without a
					// preceding about="..." or <topic> (the original threw NPE)
					if ((entry == null) || (entry.link == null)
							|| (entry.link.length() == 0)
							|| (entry.topic == null))
						continue;
					// level = number of '/' separators in the topic path
					int level = 0;
					for (i = 0; (i = entry.topic.indexOf('/', i + 1)) > 0; level++)
						;
					// removes alphabetic (e.g. /A/) or numeric (e.g. /8/)
					// categories in topics
					if (compress) {
						entry.topic = entry.topic.replaceAll(
								"([/][A-Za-z0-9][/])|([/][A-Za-z0-9]$)", "/");
					}
					// removes non-English web sites
					if (!filterNoEnglish
							|| !entry.topic.matches(".*[/]World([/].*|$)")) {
						if (level <= maxLevel)
							storer.add(entry);
					}
					// gui update (negative progress convention, see field notes)
					currentProgress--;
				} else if ((i = line.indexOf("<d:Title>")) > 0) {
					j = line.indexOf("</d:Title>", i + 9);
					if (j == -1)
						continue;
					if (entry != null)
						entry.title = line.substring(i + 9, j);
				} else if ((i = line.indexOf("<d:Description>")) > 0) {
					j = line.indexOf("</d:Description>", i + 15);
					if (j == -1)
						continue;
					if (entry != null)
						entry.pagedescription = line.substring(i + 15, j);
				} else if ((i = line.indexOf("<topic>")) > 0) {
					j = line.indexOf("</topic>", i + 7);
					if (j == -1)
						continue;
					if (entry != null)
						entry.topic = line.substring(i + 7, j);
				}
			}
			reader.close();
		} finally {
			// always release the file descriptor, even when parsing or the
			// storer throws (the original leaked all four streams on error)
			is.close();
		}
		taskDone = true;
	}

	/**
	 * Removes every topic and entry from the backing storer.
	 */
	public void clear() throws ODPStorerException {
		storer.clear();
	}

	/**
	 * Downloads and stores all the pages linked under a DMOZ category. Pages
	 * already present in rawPageStorer are skipped; URLs are submitted to the
	 * downloader in batches of 50.
	 * 
	 * Note: The downloader parameter has to be previously started.
	 * 
	 * @param rawPageStorer destination for the downloaded pages
	 * @param downloader a running downloader
	 * @param root topic subtree to download, e.g. "Top/"
	 * @throws IOException
	 */
	public void download(RawStorer rawPageStorer, Downloader downloader,
			String root) throws Exception {
		// gui
		taskDone = false;
		currentProgress = 0l;

		Iterator iter = storer.browse(root);
		long tot = storer.size();
		int maxBuf = 50, nBuf = 0;
		ArrayList urls = new ArrayList();
		long ndone = 0;

		Listener l = new Listener(rawPageStorer);
		downloader.addListener(l);
		try {
			while (iter.hasNext()) {
				ODPEntry e = (ODPEntry) iter.next();
				ndone++;
				// negative percentage, see the ProgressMonitor field notes
				currentProgress = (int) (-100d * ((double) ndone / tot));

				if (rawPageStorer.retrieve(e.link, 0) != null) {
					logger.info("Skipping " + e.link
							+ " link because already downloaded.");
					continue;
				}
				urls.add(e.link);
				// flush the batch when full or when the input is exhausted
				if (iter.hasNext() && (++nBuf < maxBuf))
					continue;

				downloader.addURLs(urls);
				downloader.waitEmptyQueue();
				downloader.clearQueue();

				urls = new ArrayList();
				nBuf = 0;
			}
			logger.info("Stored pages: " + l.storedResources);
		} finally {
			// always detach the listener so a failed run does not keep
			// receiving callbacks (the original leaked it on exception)
			downloader.removeListener(l);
		}
		taskDone = true;
	}

	/**
	 * Same as download(RawStorer, Downloader, String) starting from the whole
	 * "Top/" tree.
	 */
	public void download(RawStorer rawPageStorer, Downloader downloader)
			throws Exception {
		this.download(rawPageStorer, downloader, "Top/");
	}

	/**
	 * Indexes every stored raw page with the given search engine. Documents
	 * already indexed are skipped; parsing/indexing errors are logged and the
	 * document is skipped.
	 * 
	 * Throws IOException by RawPageStorer and SearchEngine.
	 */
	public void index(RawStorer rawPageStorer, SearchEngine se,
			ContentExtractor ce) throws IOException {
		taskDone = false;
		currentProgress = 0l;

		DocIterator iter = rawPageStorer.browse();
		long tot = rawPageStorer.size();
		logger.info("Number of docs to index: " + tot);
		int ndone = 0;

		try {
			while (iter.hasNext()) {
				String id = (String) iter.next();
				if (se.isIndexed(id)) {
					logger.info("Skipping " + id + " "
							+ "because already indexed.");
					continue;
				}
				// NOTE(review): decodes with the platform default charset —
				// confirm the pages were stored with the same encoding
				String page = new String(iter.getObj());
				ndone++;
				try {
					String text = ce.extractText(id, page);
					se.index(id, text);
				} catch (Exception ex) {
					logger.info("Parsing/indexing exception on " + id + ": "
							+ ex.toString());
					ex.printStackTrace();
				}
				// negative percentage, see the ProgressMonitor field notes
				currentProgress = (int) (-100d * ((double) ndone / tot));
			}
		} finally {
			// release the iterator even when the search engine throws
			// (the original skipped interrupt() on exception)
			iter.interrupt();
		}
		taskDone = true;
	}

	/**
	 * Indexes the pages of a topic subtree: for each ODP entry under root,
	 * retrieves the stored raw page, extracts its text and indexes it under
	 * the entry's link. Entries already indexed or not yet downloaded are
	 * skipped; extraction errors are logged and the entry is skipped.
	 * 
	 * Throws IOException by RawPageStorer and SearchEngine.
	 */
	public void index(RawStorer rawPageStorer, SearchEngine se,
			ContentExtractor ce, String root) throws IOException,
			ODPStorerException {
		taskDone = false;
		currentProgress = 0l;

		Iterator iter = storer.browse(root);
		long tot = storer.size();
		long ndone = 0l;

		String p;
		while (iter.hasNext()) {
			ODPEntry e = (ODPEntry) iter.next();

			ndone++;
			currentProgress = (long) ((double) ndone / (double) tot * 100d);

			if (se.isIndexed(e.link)) {
				logger.info("Skipping " + e.link + " "
						+ "because already indexed.");
				continue;
			}

			// skip entries whose page was never downloaded
			byte[] b = rawPageStorer.retrieve(e.link, 0);
			if (b == null)
				continue;
			try {
				// NOTE(review): decodes with the platform default charset —
				// confirm the pages were stored with the same encoding
				p = ce.extractText(e.link, new String(b));
			} catch (Exception e1) {
				this.logger.info("exception parsing " + e.link + " :"
						+ e1.toString());
				continue;
			}
			se.index(e.link, p);
		}
		taskDone = true;
	}

	/**
	 * Index only titles and descriptions. Search engine ids correspond to urls.
	 */
	public void indexRDF(SearchEngine se, String root) throws IOException,
			ODPStorerException {
		taskDone = false;
		currentProgress = 0l;
		Iterator entries = storer.browse(root);
		long total = storer.size();
		long done = 0l;

		while (entries.hasNext()) {
			ODPEntry entry = (ODPEntry) entries.next();
			done++;
			currentProgress = (long) ((double) done / (double) total * 100d);
			if (se.isIndexed(entry.id)) {
				logger.info("Skipping " + entry.id + " "
						+ "because already indexed.");
				continue;
			}
			// title and description together form the whole indexed document
			se.index(entry.id, entry.title + " " + entry.pagedescription);
		}
		taskDone = true;
	}

	/**
	 * Same as indexRDF(SearchEngine, String) starting from the whole "Top/"
	 * tree.
	 */
	public void indexRDF(SearchEngine se) throws IOException,
			ODPStorerException {
		this.indexRDF(se, "Top/");

	}

	/**
	 * Select a random leaf from the ODP category with a number of links above a
	 * given threshold. Searches the whole "Top/" tree.
	 */
	public String randomLeaf(int minLinks) throws ODPStorerException {
		return randomLeaf(minLinks, 0, "Top/");
	}

	/**
	 * Selects a random topic from the ODP leafs under root. Leafs with fewer
	 * than minLinks links are ignored; if maxLinks > 0, leafs with a number of
	 * links greater than maxLinks are ignored too.
	 * 
	 * @return a qualifying leaf topic, or null when none exists
	 */
	public String randomLeaf(int minLinks, int maxLinks, String root)
			throws ODPStorerException {
		HashSet tabu = new HashSet();
		String topics[] = storer.topics(root);
		// fixed: the original tested topics.length < 0, which is never true
		if (topics.length == 0)
			return null;
		String s;
		// fixed: the original returned the last *tried* topic when all topics
		// were exhausted, even though it did not qualify; track success instead
		String found = null;
		String[] ss; // temp
		while (tabu.size() < topics.length) {
			s = topics[(int) (Math.random() * topics.length)];
			if (tabu.contains(s)) // skip topics already done
				continue;
			ss = storer.childs(s);
			if (ss.length != 0) {// it is not a leaf
				tabu.add(s);
				continue;
			}
			ss = storer.links(s);
			if (ss.length < minLinks) {
				tabu.add(s);
				continue;
			}
			if (maxLinks > 0 && ss.length > maxLinks) {
				tabu.add(s);
				continue;
			}
			found = s;
			break;
		}
		return found;
	}

	/**
	 * Returns all the leaf topics (topics without children) under root.
	 * Delegation to the backing storer; the old inline implementation was
	 * dead commented-out code and has been removed.
	 */
	public String[] leafs(String root) throws ODPStorerException {
		return storer.leafs(root);
	}

	/**
	 * Select a random topic from ODP with a number of links above a given
	 * threshold. Searches the whole "Top/" tree with no depth restriction.
	 */
	public String randomTopic(int minLinks) throws ODPStorerException {
		return randomTopic(minLinks, 0, 0, "Top/");
	}

	/**
	 * Starting from a random leaf in a given tree root, goes up until it finds
	 * a topic whose subtree contains enough links.
	 * 
	 * @return the topic, or null when root has no leafs or no ancestor below
	 *         root reaches minLinks
	 * @throws ODPStorerException
	 */
	public String randomTopicFromLeafs(int minLinks, String root)
			throws ODPStorerException {
		String[] leafs = storer.leafs(root);
		// fixed: the original indexed into an empty array when root had no leafs
		if (leafs.length == 0)
			return null;
		String topic = leafs[(int) (leafs.length * Math.random())];
		int currNumLinks = 0;
		while (currNumLinks < minLinks) {
			// count the entries collected from the whole subtree at topic
			ODPEntryIterator iter = this.recursiveEntriesByTopicIterator(topic);
			currNumLinks = iter.size();
			if (currNumLinks < minLinks) {
				String s = ODP.parent(topic);
				// fixed: parent() returns null at the top level; the original
				// then threw a NullPointerException on s.equals(root)
				if (s == null || s.equals(root)) {
					topic = null;
					break;
				}
				topic = s;
			}
		}
		return topic;
	}

	/**
	 * Selects a random topic among the leafs under root. Leafs with fewer than
	 * minLinks links are ignored; if maxLinks > 0, leafs with more than
	 * maxLinks links are ignored. If d > 0, categories at a depth < d are
	 * ignored (d = 0, all depths are allowable, d = 1 depths under "Top/",
	 * etc.).
	 * 
	 * @return a qualifying topic, or null when none exists
	 */
	public String randomTopic(int minLinks, int maxLinks, int d, String root)
			throws ODPStorerException {
		String topics[] = storer.topics(root);
		// fixed: the original tested topics.length < 0, which is never true
		if (topics.length == 0)
			return null;
		String s, ret = null;
		String[] ss; // temp
		// linear probe starting at a random offset, wrapping around once
		int idx = (int) (Math.random() * topics.length);
		int first_idx = idx;
		while (idx - first_idx < topics.length) {
			s = topics[idx % topics.length];
			if (ODP.topicDepth(s) < d) {
				idx++;
				continue;
			}
			ss = storer.childs(s);
			if (ss.length != 0) {// it is not a leaf
				idx++;
				continue;
			}
			ss = storer.links(s);
			if (ss.length < minLinks) {
				idx++;
				continue;
			}
			if (maxLinks > 0 && ss.length > maxLinks) {
				idx++;
				continue;
			}
			ret = s;
			break;
		}
		return ret;
	}

	/**
	 * Selects a set of num distinct random topics among the leafs under root.
	 * Leafs with fewer than minLinks links are ignored; if maxLinks > 0, leafs
	 * with more than maxLinks links are ignored. If d > 0, categories at a
	 * depth < d are ignored (d = 0, all depths are allowable, d = 1 depths
	 * under "Top/", etc.).
	 * 
	 * @return the selected topics; may contain fewer than num elements when
	 *         not enough qualifying topics exist (possibly empty, never null)
	 */
	public String[] randomTopics(int num, int minLinks, int maxLinks, int d,
			String root) throws ODPStorerException {
		ArrayList ret = new ArrayList();
		String topics[] = storer.topics(root);
		HashSet tabu = new HashSet();
		// fixed: the original tested topics.length < 0, which is never true;
		// an empty topic set yields an empty result (as it effectively did)
		if (topics.length == 0)
			return new String[0];
		String s;
		String[] ss; // temp
		while ((tabu.size() < topics.length) && (ret.size() < num)) {
			// linear probe from a random offset, wrapping around once
			int idx = (int) (Math.random() * topics.length);
			int first_idx = idx;
			while (idx - first_idx < topics.length) {
				s = topics[idx % topics.length];
				if (ret.contains(s)) {
					tabu.add(s);
					idx++;
					continue;
				}
				if (ODP.topicDepth(s) < d) {
					tabu.add(s);
					idx++;
					continue;
				}
				ss = storer.childs(s);
				if (ss.length != 0) {// it is not a leaf
					tabu.add(s);
					idx++;
					continue;
				}
				ss = storer.links(s);
				if (ss.length < minLinks) {
					tabu.add(s);
					idx++;
					continue;
				}
				if (maxLinks > 0 && ss.length > maxLinks) {
					tabu.add(s);
					idx++;
					continue;
				}
				tabu.add(s);
				ret.add(s);
				break;
			}
		}
		return (String[]) ret.toArray(new String[] {});
	}

	// /**
	// * No longer used. It basically returns a random topic in the deepest
	// level of ODP.
	// */
	// public String _randomLeaf(int minLinks, int maxLinks, String root) throws
	// ODPStorerException {
	// String topics[] = storer.childs(root);
	// if (topics.length < 0)
	// return null;
	//
	// String ret = null;
	// int maxDepth = 0;
	// for (int i = 0; i < Math.min(topics.length, 100); i++) {
	// String s = topics[i];
	// int j = 0, depth = 0;
	// while ((j = s.indexOf('/', j + 1)) > 0)
	// depth++;
	// if (depth > maxDepth)
	// depth = maxDepth;
	// }
	//
	// while (true) {
	// String s = topics[(int) (Math.random() * topics.length)];
	// int j = 0, depth = 0;
	// while ((j = s.indexOf('/', j + 1)) > 0)
	// depth++;
	// if (depth == maxDepth) {
	//
	// Iterator iter = storer.browse(s);
	// int n = 0;
	// while (iter.hasNext()) {
	// Object item = (Object) iter.next();
	// n++;
	// }
	// if (n >= minLinks) {
	// ret = s;
	// break;
	// }
	// }
	// }
	// return ret;
	// }

	/**
	 * No longer used. It basically returns the links of a random topic in the
	 * deepest level of ODP.
	 * 
	 * WARNING: loops forever when no deepest-level topic has at least minLinks
	 * links; kept as-is because the method is unused.
	 * NOTE(review): storer.browse(s) semantics have changed since this was
	 * written — verify before reviving this method.
	 */
	public ODPEntry[] _randomLeafEntries(int minLinks)
			throws ODPStorerException {
		String topics[] = storer.topics();
		// fixed: the original tested topics.length < 0 (never true) and then
		// indexed into the empty array below
		if (topics.length == 0)
			return null;

		// maxDepth is estimated by sampling (at most) the first 100 topics
		int maxDepth = 0;
		for (int i = 0; i < Math.min(topics.length, 100); i++) {
			String s = topics[i];
			int j = 0, depth = 0;
			while ((j = s.indexOf('/', j + 1)) > 0)
				depth++;
			if (depth > maxDepth)
				maxDepth = depth;
		}

		ArrayList entries = new ArrayList();
		while (true) {
			String s = topics[(int) (Math.random() * topics.length)];
			int j = 0, depth = 0;
			while ((j = s.indexOf('/', j + 1)) > 0)
				depth++;
			if (depth == maxDepth) {
				Iterator iter = storer.browse(s);
				int n = 0;
				while (iter.hasNext()) {
					entries.add((ODPEntry) iter.next());
					n++;
				}
				if (n >= minLinks)
					break;
				entries.clear();
			}
		}
		return (ODPEntry[]) entries.toArray(new ODPEntry[] {});
	}

	/**
	 * Selects a random topic with a given depth and returns its links. Depth 0
	 * corresponds to Top level.
	 * 
	 * NOTE(review): the original javadoc claimed "If d = 0, all the depths are
	 * allowable", but the code requires depth == d exactly — confirm intent.
	 * WARNING: loops forever when no topic at depth d has at least minLinks
	 * entries.
	 * 
	 * @param minLinks
	 *            int
	 * @throws ODPStorerException
	 * @return ODPEntry[] or null when the storer has no topics
	 */
	public ODPEntry[] randomTopicEntries(int minLinks, int d)
			throws ODPStorerException {
		String topics[] = storer.topics();
		// fixed: the original tested topics.length < 0 (never true) and then
		// indexed into the empty array below
		if (topics.length == 0)
			return null;

		ODPEntry[] entries = null;
		while (true) {
			String s = topics[(int) (Math.random() * topics.length)];
			// depth = number of '/' separators in the topic path
			int j = 0, depth = 0;
			while ((j = s.indexOf('/', j + 1)) > 0)
				depth++;
			if (depth == d) {
				entries = storer.entries(s);
				if (entries.length >= minLinks)
					break;
			}
		}
		return entries;
	}

	/**
	 * Selects a random topic from the ODP leafs by a random walk: starts at a
	 * random topic and repeatedly descends into a random child until a leaf
	 * (topic without children) is reached.
	 * 
	 * @return a leaf topic, or null when the storer has no topics
	 */
	public String randomLeaf() throws ODPStorerException {

		String topics[] = storer.topics();
		// fixed: the original tested topics.length < 0 (never true) and then
		// indexed into the empty array below
		if (topics.length == 0)
			return null;

		String s = topics[(int) (Math.random() * topics.length)];
		while (true) {
			String[] ss = storer.childs(s);
			if (ss.length == 0)
				break;
			s = ss[(int) (Math.random() * ss.length)];
		}
		return s;
	}

	/**
	 * Select a specific topic and returns its entries.
	 * 
	 * @param topic
	 *            String
	 * @throws ODPStorerException
	 * @return ODPEntry[]
	 */
	public ODPEntry[] entriesByTopic(String topic) throws ODPStorerException {
		return storer.entries(topic);
	}

	/**
	 * Same as entriesByTopic but collects also descendants. No longer used.
	 * 
	 * @deprecated superseded by recursiveEntriesByTopicIterator
	 */
	public ODPEntry[] _recursiveEntriesByTopic(String topic)
			throws ODPStorerException {
		List l = new ArrayList();
		l = recursiveEntriesCollector(topic, l);
		return (ODPEntry[]) l.toArray(new ODPEntry[] {});
	}

	/**
	 * Depth-first collector: appends to l the entries of topic and of all its
	 * descendants, then returns l.
	 */
	protected List recursiveEntriesCollector(String topic, List l)
			throws ODPStorerException {
		String[] childs = this.childs(topic);
		for (int i = 0; i < childs.length; i++) {
			// fixed: debug output went to System.out; route it through the
			// logger at FINE so normal runs are not spammed
			logger.fine("child:" + childs[i]);
			l = recursiveEntriesCollector(childs[i], l);
		}
		Iterator iter = storer.browse(topic);
		while (iter.hasNext())
			l.add((ODPEntry) iter.next());

		return l;
	}

	/**
	 * Same as entriesByTopic but collects also descendants.
	 * 
	 * @deprecated a new version returns an iterator
	 */
	public ODPEntry[] recursiveEntriesByTopic(String topic)
			throws ODPStorerException {
		ArrayList l = new ArrayList();
		Iterator iter = storer.browse(topic);
		while (iter.hasNext()) {
			// fixed: the original called iter.next() twice per iteration,
			// skipping every other entry and risking NoSuchElementException
			l.add((ODPEntry) iter.next());
		}
		return (ODPEntry[]) l.toArray(new ODPEntry[] {});
	}

	/**
	 * Same as entriesByTopic but collects also descendants, returned lazily as
	 * an iterator.
	 * NOTE(review): assumes storer.browse(topic) iterates the whole subtree —
	 * confirm against the ODPStorer implementation.
	 */
	public ODPEntryIterator recursiveEntriesByTopicIterator(String topic)
			throws ODPStorerException {
		return storer.browse(topic);
	}

	/**
	 * Select a specific topic and returns its topic direct descendants.
	 * 
	 * @param topic
	 *            String
	 * @throws ODPStorerException
	 * @return String[]
	 */
	public String[] childs(String topic) throws ODPStorerException {
		// plain delegation to the backing store
		return storer.childs(topic);
	}


	/**
	 * Select a specific topic and returns all of its topic descendants,
	 * depth-first: each direct child is followed by its own descendants.
	 * 
	 * @param topic
	 *            String
	 * @throws ODPStorerException
	 * @return String[]
	 */
	public String[] recursiveChilds(String topic) throws ODPStorerException {
		ArrayList collected = new ArrayList();
		String[] direct = storer.childs(topic);
		for (int idx = 0; idx < direct.length; idx++) {
			collected.add(direct[idx]);
			collected.addAll(Arrays.asList(recursiveChilds(direct[idx])));
		}
		return (String[]) collected.toArray(new String[] {});
	}
	
	/**
	 * Returns the direct ancestor of a topic, or null when the topic has no
	 * ancestor (no '/' separator, or a leading '/').
	 */
	public static String parent(String topic) {
		int slash = topic.lastIndexOf('/');
		if (slash <= 0)
			return null;
		return topic.substring(0, slash);
	}

	/**
	 * Returns the ancestor topic at the given depth (0 means top). Ex.
	 * topic=Top/A1/B2/C3 depth=2 => Returns Top/A1/B2. Topics shallower than
	 * d are returned unchanged.
	 */
	public static String ancestor(String topic, int d) {
		int cut = -1;
		// advance past d + 1 separators; stop early if the topic runs out
		for (int steps = 0; steps <= d; steps++) {
			cut = topic.indexOf('/', cut + 1);
			if (cut == -1)
				break;
		}
		return (cut == -1) ? topic : topic.substring(0, cut);
	}

	/**
	 * True if t2 is included in t1 (t1 is a textual prefix of t2). Ex. t1 =
	 * Top/A1 and t2 = Top/A1/B2/C3. Note: a plain prefix test, so "Top/A"
	 * also "includes" "Top/AB".
	 */
	public static boolean included(String t1, String t2) {
		boolean isPrefix = t2.startsWith(t1);
		return isPrefix;
	}

	/**
	 * Returns all the topics known to the backing storer.
	 */
	public String[] topics() throws ODPStorerException {
		return storer.topics();
	}

	/**
	 * Returns the full ODPEntry related to a link.
	 */
	public ODPEntry entryByLink(String link) throws ODPStorerException {
		return storer.get(link);
	}

	/**
	 * Returns the most specific ancestor in common between two topics. For
	 * example: t1 = Top/Adult/Arts/Movies/Multimedia/Audio and t2 =
	 * Top/Adult/Arts/Movies/Cultures_and_Groups/African returns
	 * Top/Adult/Arts/Movies. Identical topics return t1 itself.
	 * 
	 * @param t1
	 * @param t2
	 * @return
	 */
	public static String commonAncestor(String t1, String t2) {
		int shorter = Math.min(t1.length(), t2.length());
		// length of the common character prefix
		int same = 0;
		while (same < shorter && t1.charAt(same) == t2.charAt(same))
			same++;
		if (same == t1.length() && same == t2.length())
			return t1; // identical topics
		// back up to the last whole segment boundary
		int cut = Math.min(same, t1.lastIndexOf('/', same));
		return t1.substring(0, cut);
	}

	/**
	 * Returns the depth of the topic (0 means the top level), i.e. the number
	 * of '/' separators. Ex. Top/Adult => 1 level.
	 * 
	 * @param t
	 * @return
	 */
	public static int topicDepth(String t) {
		int depth = 0;
		for (int i = 0; i < t.length(); i++) {
			if (t.charAt(i) == '/')
				depth++;
		}
		return depth;
	}

	/**
	 * True if the two topics are siblings (share the same direct ancestor).
	 * Two top-level topics (no parent) are considered siblings.
	 * 
	 * @param t1
	 * @param t2
	 * @return
	 */
	public static boolean siblings(String t1, String t2) {
		String p1 = ODP.parent(t1);
		String p2 = ODP.parent(t2);
		// fixed: the original threw NullPointerException when t1 had no
		// parent (ODP.parent returns null at the top level)
		if (p1 == null)
			return p2 == null;
		return p1.equals(p2);
	}

	/**
	 * ProgressMonitor: current progress value of the running task.
	 * NOTE(review): some tasks store negative percentages here — confirm how
	 * callers interpret the sign convention.
	 */
	public long getCurrent() {
		return this.currentProgress;
	}

	/**
	 * ProgressMonitor: this class does not provide a task identifier.
	 */
	public Object getTaskId() {
		return null;
	}

	/**
	 * ProgressMonitor: true when no long-running task is in progress.
	 */
	public boolean taskDone() {
		return this.taskDone;
	}

	/**
	 * Ad-hoc manual test / demo entry point: exercises the static topic
	 * helpers, then runs a few queries against a local MySQL-backed storer.
	 * Requires a MySQL server on 127.0.0.1 with an "odp" database.
	 */
	public static void main(String[] args) throws Exception {
		// System.out.println(ODP.topicDepth("Top/Adult/Arts/Movies/Multimedia/Audio/"));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 0));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 1));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 2));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 3));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 4));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 5));
		System.out.println(ODP.ancestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio", 6));
		System.out.println(ODP.commonAncestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio",
				"Top/Adult/Arts/Movies/Cultures_and_Groups/African"));
		System.out.println(ODP.commonAncestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio",
				"Top/Adult/Arts/Movies/Multimedia/Audio"));
		System.out.println(ODP.commonAncestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio",
				"Top/Adult/Arts/Movies/Multimedia/AudioX"));
		System.out.println(ODP.commonAncestor(
				"Top/Adult/Arts/Movies/Multimedia/Audio",
				"Top/Adult/Arts/Movies/Multimedia/Audio/X"));
		System.out.println(ODP
				.topicDepth("Top/Adult/Arts/Movies/Multimedia/Audio"));

		// int d = ODP.depth("Top/Games/Board_Games/Abstract/");

		MySqlODPStorer odpStorer = new MySqlODPStorer();
		try {
			odpStorer.init("com.mysql.jdbc.Driver",
					"jdbc:mysql://127.0.0.1/odp?user=root",
					"jdbc:mysql://127.0.0.1/mysql?user=root", "odp", false);
		} catch (Exception ex) {
			System.err.println(ex.toString());
			ex.printStackTrace();
			System.exit(1);
		}
		ODP odp = new ODP(odpStorer);		
		odp.randomTopic(100, 1, 0, "Top/Computers/E-Books/Readers");
		// odp.loadODP("/Users/fabiogasparetti/projects/fc-eval/content.rdf.u8.gz",
		// 0, true, true);
		String[] ss = odp.childs("Top/Arts");
		for (int i = 0; i < ss.length; i++)
			System.out.println(ss[i]);
		String s = odp.randomLeaf();
		System.out.println(s);
		// walk back up to the top, printing each ancestor
		while (true) {
			s = odp.parent(s);
			if (s == null)
				break;
			System.out.println(s);
		}
		// odp.loadODP(fn, level);
		// odp.download(rawPageStorer, odpDownlo5ader);
		// odp.index(rawPageStorer, odpSearchEngine, contentExtractor);

		// Downloader downloader = new Downloader();
		// downloader.setMaxSize(XmlPreferences.getInt("MaxResourceSize",
		// 1000000));
		// downloader.setMaxAttempts(XmlPreferences.getInt("MaxAttempts",
		// 3));
		// downloader.setFollowRedirect(true);
	}

}