package sublinear.model.db;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Vector;
import java.util.logging.Logger;

import sublinear.common.Filter;
import sublinear.common.SimplePRNG;

/**
 * Data access object for the database representation of the Wikipedia graph
 * 
 * @author itay
 *
 */
public class DbGraphModel {
	
	
	// Class-wide JUL logger, keyed by this class's fully-qualified name
	private static final Logger log = Logger.getLogger( DbGraphModel.class.getName() );
	
	// Objects representing the DB records
	
	/** Marker interface shared by all record types persisted in the DB. */
	public interface DataRecord {
		// Empty interface
	}
	
	/**
	 * One row of the Articles table: an article ID paired with its title.
	 */
	public static class Article implements DataRecord {
		
		public long id;
		public String title;
		
		public Article(long id, String title) {
			this.title = title;
			this.id = id;
		}
		
	}
	
	/**
	 * One row of the Redirects table: a redirect page ID and the ID of the
	 * article it points to.
	 */
	public static class Redirect implements DataRecord {
		
		public long id;
		public long destId;
		
		public Redirect(long id, long destId) {
			this.destId = destId;
			this.id = id;
		}
		
	}
	
	/**
	 * One row of the Links table: a directed edge from article srcId to
	 * article destId.
	 */
	public static class Link implements DataRecord {
		
		public long srcId;
		public long destId;
		
		public Link(long srcId, long destId) {
			this.destId = destId;
			this.srcId = srcId;
		}
		
	}
	
	// Number of records accumulated before a batched INSERT/DELETE is executed
	private final static int BATCH_SIZE = 1024;
	
	// Pending records waiting to be flushed (see flushArticles/flushLinks/flushRedirects)
	private Vector<Article> articlesBatch;
	private Vector<Link> linksBatch;
	private Vector<Redirect> redirectsBatch;
	
	// Sentinel article ID meaning "no such article / lookup failed"
	public final static long UNRESOLVED = 0L;
	
	/**
	 * Creates the DAO over an already-open SQLite connection. Disables
	 * auto-commit and applies in-memory journal/temp-store PRAGMAs for speed.
	 * 
	 * @param db Open JDBC connection to a SQLite database (ownership stays with the caller)
	 * @throws SQLException
	 */
	public DbGraphModel(Connection db) throws SQLException {
		this.db = db;

		// Do not auto-commit
		db.setAutoCommit(false);
		// Try to get quicker by putting DB journal and temporary 
		// storage in memory
		Statement st = db.createStatement();
		try {
			st.executeUpdate("PRAGMA journal_mode=MEMORY");
			st.executeUpdate("PRAGMA temp_store=MEMORY");
		} finally {
			// Close even if a PRAGMA throws (the original leaked the statement on error)
			st.close();
		}
		db.commit();
		
		articlesBatch = new Vector<Article>(BATCH_SIZE);
		linksBatch = new Vector<Link>(BATCH_SIZE);
		redirectsBatch = new Vector<Redirect>(BATCH_SIZE);
		
		linksCache = new HashMap<Long, ArrayList<Long>>();
		isLinksCacheValid = false;
		
		invalidateArticleStats();
	}
	
	/**
	 * (Re)creates the prepared statements and resets all internal batching
	 * and caching state, for working with whatever data already exists in
	 * the DB.
	 * 
	 * @throws SQLException
	 */
	public void initializeWithExistingData() throws SQLException {
		log.info("Initializing and resetting internal data");
		
		createPreparedStatements();
		
		// Drop any partially-accumulated batches and cached values
		articlesBatch.clear();
		redirectsBatch.clear();
		linksBatch.clear();
		linksCache.clear();
		invalidateLinksCache();
		invalidateArticleStats();
	}
	
	/**
	 * Recreates the schema from scratch (dropping all existing data) and
	 * resets the internal state accordingly.
	 * 
	 * @throws SQLException
	 */
	public void rebuildDatabase() throws SQLException {
		log.info("Rebuilding database");
		
		dropTables();
		createTables();
		initializeWithExistingData();
	}
	
	/**
	 * Drops all tables, effectively deleting all data.
	 * 
	 * @throws SQLException
	 */
	public void dropTables() throws SQLException {
		log.info("Dropping all tables");
		
		// Delegate to the selective variant with every table selected
		dropTables(true, true, true);
	}
	
	/**
	 * Selectively drops tables, VACUUMs the database, and invalidates any
	 * in-memory caches that referred to the dropped tables.
	 * 
	 * @param dropArticles Drop articles table if true
	 * @param dropRedirects Drop redirects table if true
	 * @param dropLinks Drop links table if true
	 * @throws SQLException
	 */
	public void dropTables(boolean dropArticles, boolean dropRedirects, boolean dropLinks) throws SQLException {
		
		log.info("Dropping tables - Articles: " + dropArticles + " Redirects: " + dropRedirects + " Links: " + dropLinks);
		
		Statement st = db.createStatement();
		try {
			// Delete in opposite direction of creation
			if (dropRedirects)
				st.execute("DROP TABLE IF EXISTS Redirects;");
			if (dropLinks)
				st.execute("DROP TABLE IF EXISTS Links;");
			if (dropArticles)
				st.execute("DROP TABLE IF EXISTS Articles;");
		} finally {
			// Close even on failure (the original leaked the statement on error)
			st.close();
		}
		db.commit();
		
		vacuumDb();
		
		// Cached values derived from the dropped tables are now stale
		if (dropArticles)
			invalidateArticleStats();
		if (dropLinks)
			invalidateLinksCache();
	}
	
	/**
	 * Run the VACUUM operation on the DB. Recommended after deleting a significant amount
	 * of data.
	 * 
	 * @throws SQLException
	 */
	private void vacuumDb() throws SQLException {
		log.info("Performing DB VACUUM");
		
		// Cannot run it inside a transaction, so auto-commit must be enabled first
		db.setAutoCommit(true);
		Statement st = db.createStatement();
		try {
			st.executeUpdate("VACUUM");
		} finally {
			// Close the statement (the original leaked it) and restore transactional
			// mode even if VACUUM fails (the original left auto-commit enabled on error)
			st.close();
			db.setAutoCommit(false);
		}
	}
		
	/**
	 * (Re)creates every prepared statement used by this DAO. Must run after
	 * the schema exists; statements for the Redirects table are created only
	 * if that table is present (it may have been dropped).
	 * 
	 * @throws SQLException
	 */
	private void createPreparedStatements() throws SQLException {
		insertArticleStmt = db.prepareStatement("INSERT INTO Articles(ID, Title) VALUES(?, ?)");
		insertLinkStmt = db.prepareStatement("INSERT INTO Links(SrcID, DestID) VALUES(?, ?)");
		
		getArticleByTitleStmt = db.prepareStatement("SELECT ID FROM Articles WHERE Title = ?");
		getArticleByIdStmt = db.prepareStatement("SELECT Title FROM Articles WHERE ID = ?");
		getLinkByIdStmt = db.prepareStatement("SELECT DestID FROM Links WHERE SrcID = ?");
		getAllLinkStmt = db.prepareStatement("SELECT SrcID, DestID FROM Links WHERE 1");
		getLinksStatsStmt = db.prepareStatement("SELECT COUNT(SrcID) AS cnt FROM Links");
		getArticlesStatsStmt = db.prepareStatement("SELECT MAX(ID) AS MAX, MIN(ID) AS MIN, COUNT(ID) AS COUNT FROM Articles");
		getRandomArticleIdStmt = db.prepareStatement("SELECT ID FROM Articles ORDER BY random() LIMIT 1");
		getRandomArticleIdFastStmt = db.prepareStatement("SELECT ID FROM Articles WHERE id >= ? LIMIT 1");
		getHighestDegreeArticleIdsStmt = db.prepareStatement("SELECT COUNT(SrcId) AS cnt FROM Links GROUP BY SrcId ORDER BY cnt DESC LIMIT 16");
		getAllArticlesStmt = db.prepareStatement("SELECT ID, Title FROM Articles WHERE 1");
		
		deleteArticleByIdStmt = db.prepareStatement("DELETE FROM Articles WHERE (ID = ?)");
		
		// Redirects table might be deleted...
		if (hasTable("Redirects")) {
			insertRedirectStmt = db.prepareStatement("INSERT INTO Redirects(ID, DestID) VALUES(?, ?)");
			getRedirectsStatsStmt = db.prepareStatement("SELECT COUNT(ID) AS cnt FROM Redirects");
			getAllRedirectIdsStmt = db.prepareStatement("SELECT ID FROM Redirects WHERE 1");
			getRedirectByIdStmt = db.prepareStatement("SELECT DestID FROM Redirects WHERE ID = ?");
		}
	}
	
	/**
	 * Creates the Articles, Links and Redirects tables (without indexes).
	 * 
	 * Since the work flow consists of a lot of insertions, followed by
	 * working in read-only mode, indexes are created only after inserting
	 * all the relevant data (see the create*Indexes methods).
	 * 
	 * @throws SQLException
	 */
	public void createTables() throws SQLException {
		
		final String CREATE_TABLE_ARTICLES = 
				"CREATE TABLE Articles (" + 
				"	ID INT NOT NULL," +
				"	Title TEXT NOT NULL" +
				");";
		final String CREATE_TABLE_LINKS = 
				"CREATE TABLE Links (" +
				"	SrcID INT NOT NULL, " +
				"	DestID INT NOT NULL" +
				");";
		final String CREATE_TABLE_REDIRECTS = 
				"CREATE TABLE Redirects (" +
				"	ID INT NOT NULL, " +
				"	DestID INT NOT NULL" +
				");";
		
		log.info("Creating tables");
		
		Statement st = db.createStatement();
		try {
			st.execute(CREATE_TABLE_ARTICLES);
			st.execute(CREATE_TABLE_LINKS);
			st.execute(CREATE_TABLE_REDIRECTS);
		} finally {
			// Close even on failure (the original leaked the statement on error)
			st.close();
		}
		db.commit();
		invalidateArticleStats();
		
		log.info("Table created");
	}
	
	/**
	 * Checks whether a table with the given name exists in the SQLite schema
	 * (by querying sqlite_master).
	 * 
	 * @param name Table name to look up
	 * @return true if the table exists
	 * @throws SQLException
	 */
	private boolean hasTable(String name) throws SQLException {
		final String GET_TABLE_BY_NAME = "SELECT name FROM sqlite_master WHERE type='table' AND name=?";
		
		log.info("Checking existence for table " + name);
		
		PreparedStatement pst = db.prepareStatement(GET_TABLE_BY_NAME);
		try {
			pst.setString(1, name);
			ResultSet rs = pst.executeQuery();
			boolean tableExists = rs.next();
			rs.close();
			return tableExists;
		} finally {
			// Close the statement (the original never closed it, leaking it on every call)
			pst.close();
		}
	}
	
	/**
	 * Deletes every article that is a redirect page, in batches of
	 * BATCH_SIZE, using the IDs listed in the Redirects table. The title
	 * index is dropped before the mass delete and recreated afterwards, and
	 * the DB is VACUUMed at the end.
	 * 
	 * NOTE(review): cached article stats are NOT invalidated here despite the
	 * deletions - verify whether callers handle that.
	 * 
	 * @throws SQLException
	 */
	public void deleteRedirectArticles() throws SQLException {
		log.info("Deleting all redirect articles");
		
		long redirectsCount;
		// How many do we delete before reporting
		long milestone; 
		long counter = 0;
		// Fetch the total redirect row count, used for progress reporting
		ResultSet rs1 = getRedirectsStatsStmt.executeQuery();
		if (rs1.next())
			redirectsCount = rs1.getLong("cnt");
		else
			throw new SQLException("Failed to fetch Redirects table size");
		rs1.close();
		
		// Make sure it's at least 1, otherwise we can get divide-by-zero error
		milestone = Math.max(1, (redirectsCount / 100));
	
		log.info("Dropping TitleIdx index");
		
		// Before deleting - drop the titles index
		db.createStatement().executeUpdate("DROP INDEX IF EXISTS TitleIdx");
		db.commit();
		
		log.info("Is about to delete " + redirectsCount + " redirection articles");
		
		ResultSet rs2 = getAllRedirectIdsStmt.executeQuery();
		while (rs2.next()) {
			long redirectArticleId = rs2.getLong(1);
			log.finest("Deleting article " + redirectArticleId);
			
			// Queue the DELETE; execute and commit once every BATCH_SIZE rows
			deleteArticleByIdStmt.setLong(1, redirectArticleId);
			deleteArticleByIdStmt.addBatch();
			
			if ((++counter % BATCH_SIZE) == 0) {
				log.finer("Commit deletion for " + BATCH_SIZE + " articles");
				deleteArticleByIdStmt.executeBatch();
				db.commit();
			}
			// Report progress
			if ((counter % milestone) == 0) {
				log.info("Deleted " + (counter) + "/" + redirectsCount + " (" + (counter / milestone) + "%)");
			}
		}
		rs2.close();

		// For the leftovers
		log.finer("Commit deletion for " + (counter % BATCH_SIZE) + " articles");
		deleteArticleByIdStmt.executeBatch();
		db.commit();
		
		// Create the titles index back
		db.createStatement().executeUpdate("CREATE INDEX TitleIdx ON Articles(Title)");
		db.commit();
		
		log.info("Deleted " + counter + " articles");
		
		vacuumDb();
	}
	
	/**
	 * Deletes a single article row by ID and commits immediately.
	 * 
	 * @param id ID of the article to delete
	 * @throws SQLException
	 */
	public void deleteArticleById(long id) throws SQLException {
		log.finer("Deleting article " + id);
		
		deleteArticleByIdStmt.setLong(1, id);
		deleteArticleByIdStmt.executeUpdate();
		db.commit();
		
		// Row count / min / max may have changed
		invalidateArticleStats();
	}
	
	/**
	 * Creates the title index and the unique ID index on the Articles table.
	 * Intended to run once, after bulk insertion is complete.
	 * 
	 * @throws SQLException
	 */
	public void createArticlesIndexes() throws SQLException {
		final String CREATE_ARTICLES_TITLE_INDEX = "CREATE INDEX TitleIdx ON Articles(Title)";
		final String CREATE_ARTICLES_ID_INDEX = "CREATE UNIQUE INDEX IdIdx ON Articles(ID)";
		
		log.info("Creating indexes on the articles table...");
		
		// Use a single statement and make sure it is closed (the original
		// created two statements and never closed either of them)
		Statement st = db.createStatement();
		try {
			st.execute(CREATE_ARTICLES_TITLE_INDEX);
			st.execute(CREATE_ARTICLES_ID_INDEX);
		} finally {
			st.close();
		}
		
		db.commit();
		
		log.info("Done creating indexes on the articles table");
	}
	
	/**
	 * Creates the source-ID index on the Links table.
	 * 
	 * NOTE(review): unlike createArticlesIndexes, no commit is issued here -
	 * verify that callers commit afterwards.
	 * 
	 * @throws SQLException
	 */
	public void createLinksIndexes() throws SQLException {
		final String CREATE_LINKS_INDEX = "CREATE INDEX SrcIDIdx ON Links(SrcID)";
		
		Statement st = db.createStatement();
		try {
			st.execute(CREATE_LINKS_INDEX);
		} finally {
			// Close the statement (the original leaked it)
			st.close();
		}
	}
	
	/**
	 * Creates the unique ID index on the Redirects table.
	 * 
	 * NOTE(review): unlike createArticlesIndexes, no commit is issued here -
	 * verify that callers commit afterwards.
	 * 
	 * @throws SQLException
	 */
	public void createRedirectsIndexes() throws SQLException {
		final String CREATE_REDIRECTS_ID_INDEX = "CREATE UNIQUE INDEX ID ON Redirects(ID)";
		
		Statement st = db.createStatement();
		try {
			st.execute(CREATE_REDIRECTS_ID_INDEX);
		} finally {
			// Close the statement (the original leaked it)
			st.close();
		}
	}	
	
	/**
	 * Writes all queued article records to the DB in a single batch, commits,
	 * clears the queue and invalidates the cached article statistics.
	 * 
	 * @throws SQLException
	 */
	public void flushArticles() throws SQLException {
		// Fixed log-message typo from the original ("artice")
		log.finer("Inserting " + articlesBatch.size() + " article records");
		
		for (Article article : articlesBatch) {
			insertArticleStmt.setLong(1, article.id);
			insertArticleStmt.setString(2, article.title);
			insertArticleStmt.addBatch();
		}
		insertArticleStmt.executeBatch();
		db.commit();
		invalidateArticleStats();
		
		articlesBatch.clear();
	}
	
	/**
	 * Queues an article record for insertion; flushes the batch to the DB
	 * once BATCH_SIZE records have accumulated.
	 * 
	 * @param article Record to insert
	 * @throws SQLException
	 */
	public void insertArticle(Article article) throws SQLException {
		articlesBatch.add(article);
		if (articlesBatch.size() >= BATCH_SIZE)
			flushArticles();
	}
	
	/**
	 * Writes all queued redirect records to the DB in a single batch,
	 * commits, and clears the queue.
	 * 
	 * @throws SQLException
	 */
	public void flushRedirects() throws SQLException {
		log.finer("Inserting " + redirectsBatch.size() + " redirect records");
		
		for (Redirect redirect : redirectsBatch) {
			insertRedirectStmt.setLong(1, redirect.id);
			insertRedirectStmt.setLong(2, redirect.destId);
			insertRedirectStmt.addBatch();
		}
		insertRedirectStmt.executeBatch();
		db.commit();
		redirectsBatch.clear();
	}	
	
	/**
	 * Queues a redirect record for insertion; flushes the batch to the DB
	 * once BATCH_SIZE records have accumulated.
	 * 
	 * @param redir Record to insert
	 * @throws SQLException
	 */
	public void insertRedirect(Redirect redir) throws SQLException {
		redirectsBatch.add(redir);
		if (redirectsBatch.size() >= BATCH_SIZE)
			flushRedirects();
	}		

	/**
	 * Writes all queued link records to the DB in a single batch, commits,
	 * and clears the queue.
	 * 
	 * @throws SQLException
	 */
	public void flushLinks() throws SQLException {
		log.finer("Inserting " + linksBatch.size() + " link records");
		
		for (Link edge : linksBatch) {
			insertLinkStmt.setLong(1, edge.srcId);
			insertLinkStmt.setLong(2, edge.destId);
			insertLinkStmt.addBatch();
		}
		insertLinkStmt.executeBatch();
		db.commit();
		linksBatch.clear();
	}	
	
	/**
	 * Queues a link record for insertion; flushes the batch to the DB once
	 * BATCH_SIZE records have accumulated.
	 * 
	 * @param link Record to insert
	 * @throws SQLException
	 */
	public void insertLink(Link link) throws SQLException {
		linksBatch.add(link);
		if (linksBatch.size() >= BATCH_SIZE)
			flushLinks();
	}
	
	/**
	 * Looks up an article ID by its exact title.
	 * 
	 * @param title Article title
	 * @return The article ID, or UNRESOLVED when no row matches
	 * @throws SQLException
	 */
	public long getArticleByTitle(String title) throws SQLException {
		getArticleByTitleStmt.setString(1, title);
		ResultSet rs = getArticleByTitleStmt.executeQuery();
		long articleId = rs.next() ? rs.getLong(1) : UNRESOLVED;
		rs.close();
		
		return articleId;
	}
	
	/**
	 * Fetches an article's title by its ID. In fast mode the DB round-trip
	 * is skipped and the numeric ID itself is returned as the title.
	 * 
	 * @param id Article ID
	 * @return The title, or null when the ID is unknown (non-fast mode only)
	 * @throws SQLException
	 */
	public String getArticleById(long id) throws SQLException {
		String articleTitle;
		
		if (isFastMode()) {
			// Fast mode: use the ID as a stand-in title, no DB access
			articleTitle = Long.toString(id);
		} else {
			articleTitle = null;
			getArticleByIdStmt.setLong(1, id);
			ResultSet rs = getArticleByIdStmt.executeQuery();
			if (rs.next())
				articleTitle = rs.getString(1);
			rs.close();
		}
		
		log.finest("Retrieved article " + id + ": " + (articleTitle != null ? articleTitle : "<Not found>"));
		
		return articleTitle;
	}
	
	/**
	 * Fetches the destination of a redirect page.
	 * 
	 * @param id Redirect page ID
	 * @return Destination article ID, or UNRESOLVED when the ID is not a redirect
	 * @throws SQLException
	 */
	public long getRedirectById(long id) throws SQLException {
		getRedirectByIdStmt.setLong(1, id);
		ResultSet rs = getRedirectByIdStmt.executeQuery();
		long redirectDest = rs.next() ? rs.getLong(1) : UNRESOLVED;
		rs.close();
		
		return redirectDest;
	}
	
	/**
	 * @return true when the given ID has an entry in the Redirects table
	 * @throws SQLException
	 */
	public boolean isRedirect(long id) throws SQLException {
		return getRedirectById(id) != UNRESOLVED;
	}
	
	/**
	 * Looks up an article by title and follows any redirect chain to the
	 * final article.
	 * 
	 * @param title Article title
	 * @return Resolved article ID, or UNRESOLVED when not found or the
	 *         redirects form a loop
	 * @throws SQLException
	 */
	public long resolveArticleByTitle(String title) throws SQLException {
		long articleId = getArticleByTitle(title);
		log.finest("Looking up article by title '" + title + "'. First result: " + articleId);
		
		long resolvedId = resolveRedirections(articleId);
		log.finer("Resolved article by title '" + title + "': " + resolvedId);
		
		return resolvedId;
	}	
	
	/**
	 * Follows the redirect chain starting at the given ID until a
	 * non-redirect article is reached.
	 * 
	 * @param id Starting article ID
	 * @return ID of the final (non-redirect) article, or UNRESOLVED when the
	 *         chain forms a loop
	 * @throws SQLException
	 */
	public long resolveRedirections(long id) throws SQLException {
		// Every ID seen so far, used to detect redirect loops. The original
		// never added the starting ID, so a loop back to the start was only
		// caught after a redundant second traversal; seed it here.
		HashSet<Long> visited = new HashSet<Long>();
		long currentId = id;
		visited.add(currentId);
		
		long redirectDest;
		while ((redirectDest = getRedirectById(currentId)) != UNRESOLVED) {
			log.finer("Following redirection: " + currentId + " => " + redirectDest);
			currentId = redirectDest;
			
			// add() returns false when the ID was already present => loop
			if (!visited.add(currentId)) {
				log.fine("Redirection loop found, starting at " + id + ", abort lookup");
				return UNRESOLVED;
			}
		}
		
		return currentId;				
	}
	
	/**
	 * Returns all outgoing links originating at the given article ID.
	 * All links are assumed to be to existing articles; links to redirection
	 * pages are assumed to be resolved during the pre-processing phase.
	 * They are also assumed unique.
	 * 
	 * @param id ID of article to search
	 * @return List of article IDs which are linked from the provided article
	 *         (possibly empty)
	 * @throws SQLException
	 */
	public ArrayList<Long> getLinksById(long id) throws SQLException {
		
		// Serve from the in-memory cache when valid. A missing key simply
		// means the article has no outgoing links.
		if (isLinksCacheValid) {
			ArrayList<Long> cached = linksCache.get(id);
			return (cached != null) ? cached : new ArrayList<Long>();
		}
		
		ArrayList<Long> linkDestinations = new ArrayList<Long>();
		getLinkByIdStmt.setLong(1, id);
		ResultSet rs = getLinkByIdStmt.executeQuery();
		while (rs.next())
			linkDestinations.add(rs.getLong(1));
		rs.close();
		
		return linkDestinations;
	}
	
	/**
	 * Logs the articles with the most outgoing links, as returned by the
	 * highest-degree query (top 16 by out-degree).
	 * 
	 * @throws SQLException
	 */
	public void getGraphDegree() throws SQLException {
		ResultSet rs = getHighestDegreeArticleIdsStmt.executeQuery();
		while (rs.next()) {
			long articleId = rs.getLong("SrcId");
			long outDegree = rs.getLong("cnt");
			String title = getArticleById(articleId);
			log.info("Article " + title + " (" + articleId + ") has " + outDegree + " outgoing links");
		}
		rs.close();
	}
	
	/**
	 * Returns the out-degree (number of outgoing links) of the given
	 * article, served from the links cache when it is valid, otherwise from
	 * the DB.
	 * 
	 * @param id Article ID
	 * @return Number of outgoing links (0 when none)
	 * @throws SQLException
	 */
	public long getArticleDegree(long id) throws SQLException {
		if (!isLinksCacheValid) {
			long degree = getLinksById(id).size();
			log.finer("Degree for article " + id + " was found in DB: " + degree);
			return degree;
		}
		
		ArrayList<Long> cachedLinks = linksCache.get(id);
		if (cachedLinks == null) {
			log.finer("No outgoing links for article " + id + " - degree is 0");
			return 0;
		}
		log.finer("Degree for article " + id + " was found in cache: " + cachedLinks.size());
		return cachedLinks.size();
	}
	
	/**
	 * Loads the entire Links table into the in-memory adjacency cache
	 * (linksCache) and marks the cache valid.
	 * 
	 * @throws SQLException
	 */
	public void buildLinksCache() throws SQLException {
		final int REPORT_INTERVAL = 1024 * 1024;
		
		log.info("Build links cache - starting");
		
		int counter = 0;
		
		ResultSet rs = getAllLinkStmt.executeQuery();
		while (rs.next()) {
			long srcId = rs.getLong("SrcID");
			long destId = rs.getLong("DestID");
			
			// Append destId to the source's adjacency list, creating it on first use
			ArrayList<Long> dests = linksCache.get(srcId);
			if (dests == null) {
				dests = new ArrayList<Long>(1);
				linksCache.put(srcId, dests);
			}
			dests.add(destId);
			++counter;
			// The original always logged the constant interval (1048576);
			// report the running total instead
			if ((counter % REPORT_INTERVAL) == 0) {
				log.finer("" + counter + " links read to cache...");
			}
		}
		rs.close();
		isLinksCacheValid = true;
		
		log.info("Build links cache - done");
	}
	
	/**
	 * @return The smallest article ID (refreshing cached stats if stale)
	 * @throws SQLException
	 */
	public long getMinArticleId() throws SQLException {
		updateArticlesStats();
		
		return minArticleId;
	}
	
	/**
	 * @return The largest article ID (refreshing cached stats if stale)
	 * @throws SQLException
	 */
	public long getMaxArticleId() throws SQLException {
		updateArticlesStats();
		
		return maxArticleId;
	}
	
	/**
	 * @return The total number of articles (refreshing cached stats if stale)
	 * @throws SQLException
	 */
	public long getArticlesCount() throws SQLException {
		updateArticlesStats();
		
		return articlesCount;
	}
	
	/**
	 * Picks a uniformly random article ID via SQLite's ORDER BY random().
	 * Accurate but slow; see getRandomArticleIdFast() for a faster variant.
	 * 
	 * @return A random article ID, or UNRESOLVED when the table is empty
	 * @throws SQLException
	 */
	public long getRandomArticleId() throws SQLException {
		ResultSet rs = getRandomArticleIdStmt.executeQuery();
		long randomId = rs.next() ? rs.getLong(1) : UNRESOLVED;
		rs.close();
		
		return randomId;
	}
	
	/**
	 * Much faster random pick than getRandomArticleId(): draws a uniform
	 * value in [minArticleId, maxArticleId] and returns the first existing
	 * article with an ID >= the draw. Assumes IDs are roughly uniformly
	 * distributed over that range.
	 * 
	 * @return A pseudo-random article ID, or UNRESOLVED when nothing matched
	 * @throws SQLException
	 */
	public long getRandomArticleIdFast() throws SQLException {
		updateArticlesStats();
		
		long range = (maxArticleId - minArticleId) + 1;
		long candidateId = minArticleId + SimplePRNG.getInstance().nextLong(range);
		
		getRandomArticleIdFastStmt.setLong(1, candidateId);
		ResultSet rs = getRandomArticleIdFastStmt.executeQuery();
		long result = rs.next() ? rs.getLong(1) : UNRESOLVED;
		rs.close();
		
		return result;

	}
			
	/**
	 * Lazily refreshes the cached min/max/count statistics of the Articles
	 * table; a no-op while the cached values are still valid.
	 * 
	 * @throws SQLException
	 */
	private void updateArticlesStats() throws SQLException {
		if (isArticlesStatsValid)
			return;
		
		log.info("Updating articles table min ID, max ID and total row count...");
		
		ResultSet rs = getArticlesStatsStmt.executeQuery();
		if (rs.next()) {
			maxArticleId = rs.getLong("MAX");
			minArticleId = rs.getLong("MIN");
			articlesCount = rs.getLong("COUNT");
			isArticlesStatsValid = true;
			
			log.info("Done updating. Min: " + minArticleId + " max: " + maxArticleId + 
					" total row count: " + articlesCount);
		}
		rs.close();
	}
		
	// Marks the cached min/max/count article statistics as stale; they will
	// be refreshed on the next updateArticlesStats() call.
	private void invalidateArticleStats() {
		isArticlesStatsValid = false;
		
		log.finest("Cached articles table information was invalidated");
	}	
	
	// Empties the in-memory links cache and marks it invalid, forcing
	// getLinksById() back to DB lookups until buildLinksCache() runs again.
	private void invalidateLinksCache() {
		linksCache.clear();
		isLinksCacheValid = false;		
	}
	
	/** @return true when fast mode is enabled (see getArticleById) */
	public boolean isFastMode() {
		return isFastMode;
	}

	/**
	 * Enables/disables fast mode, in which getArticleById() returns the
	 * numeric ID as the title instead of querying the DB.
	 */
	public void setFastMode(boolean isFastMode) {
		log.info("Set fast mode: " + (isFastMode ? "yes" : "no"));
		
		this.isFastMode = isFastMode;
	}
	
	/**
	 * Builds an in-memory adjacency-list representation of the graph.
	 * Pass 1 inserts every article accepted by the filter; pass 2 adds every
	 * link via addLinkIfValid (links whose endpoints were filtered out are
	 * counted as filtered). Progress is logged roughly every 1%.
	 * 
	 * @param filter Predicate deciding which articles enter the list
	 * @return The populated adjacency list
	 * @throws SQLException
	 */
	public AdjacencyList buildAdjacencyList(Filter<Article> filter) throws SQLException {
		updateArticlesStats();
		
		long counter = 0;
		long filteredCounter = 0;
		long totalRequired = articlesCount;
		// Make sure it's at least 1, otherwise we can get divide-by-zero error
		long milestone = Math.max(1, (articlesCount / 100));
		
		AdjacencyList adjList = new AdjacencyList();
		log.info("Building adjacency list");
		log.info("Is about to insert " + articlesCount + " articles...");
		
		// Pass 1: articles
		ResultSet rsArticles = getAllArticlesStmt.executeQuery();
		while (rsArticles.next()) {
			long id = rsArticles.getLong(1);
			String title = rsArticles.getString(2);
			if (filter.accept(new Article(id, title))) {
				adjList.addArticle(id, title);
			}
			else
			{
				log.finest("Filtered article " + title + " (" + id + ")");
				++filteredCounter;
			}
			
			++counter;
			if ((counter % milestone) == 0) {
				log.info("Processed " + counter + "/" + totalRequired + " articles (" + (counter / milestone) + "%)");
			}
		}
		rsArticles.close();
		
		log.info("" + (counter - filteredCounter) + " articles added to list. Filtered: " + filteredCounter);
		// Reset progress counters for the links pass
		counter = 0;
		filteredCounter = 0;
		
		long linksCount;
		ResultSet rsLinksSize = getLinksStatsStmt.executeQuery();
		if (rsLinksSize.next())
			linksCount = rsLinksSize.getLong("cnt");
		else
			throw new SQLException("Failed to fetch Links table size");
		rsLinksSize.close();
		// Make sure it's at least 1, otherwise we can get divide-by-zero error
		milestone = Math.max(1, (linksCount / 100));
		totalRequired = linksCount;
		
		log.info("Is about to insert " + linksCount + " links...");
		
		// Pass 2: links; addLinkIfValid returning false counts as filtered
		ResultSet rsLinks = getAllLinkStmt.executeQuery();
		while (rsLinks.next()) {
			long srcId = rsLinks.getLong("SrcID");
			long destId = rsLinks.getLong("DestID");
			
			if (!adjList.addLinkIfValid(srcId, destId)) {
				++filteredCounter;
			}
			++counter;
			if ((counter % milestone) == 0) {
				log.info("Processed " + counter + "/" + totalRequired + " links (" + (counter / milestone) + "%)");
			}

		}
		rsLinks.close();

		log.info("" + (counter - filteredCounter) + " links added to list. Filtered: " + filteredCounter);
		
		return adjList;
	}

	// The SQLite connection (externally owned); auto-commit is disabled in the constructor
	private Connection db = null;
	
	// Insert statements
	private PreparedStatement insertArticleStmt = null;
	private PreparedStatement insertRedirectStmt = null;
	private PreparedStatement insertLinkStmt = null;
	
	// Lookup statements (created in createPreparedStatements; the Redirects
	// ones stay null when that table does not exist)
	private PreparedStatement getArticleByIdStmt = null;
	private PreparedStatement getArticleByTitleStmt = null;
	private PreparedStatement getAllArticlesStmt = null;
	private PreparedStatement getRedirectByIdStmt = null;
	private PreparedStatement getLinkByIdStmt = null;
	private PreparedStatement getAllLinkStmt = null;
	private PreparedStatement getLinksStatsStmt = null;
	private PreparedStatement getRandomArticleIdStmt = null;
	private PreparedStatement getRandomArticleIdFastStmt = null;
	private PreparedStatement getArticlesStatsStmt = null;
	private PreparedStatement getHighestDegreeArticleIdsStmt = null;
	private PreparedStatement getAllRedirectIdsStmt = null;
	private PreparedStatement getRedirectsStatsStmt = null;
	
	// Delete statements
	private PreparedStatement deleteArticleByIdStmt = null;
	
	// Internal cache of frequently used values (valid only while
	// isArticlesStatsValid is true; see updateArticlesStats)
	private boolean isArticlesStatsValid;
	private long maxArticleId;
	private long minArticleId;
	private long articlesCount;
	
	// Internal cache of the whole links table, keyed by source article ID
	// (valid only while isLinksCacheValid is true; see buildLinksCache)
	private HashMap<Long, ArrayList<Long>> linksCache = null;
	private boolean isLinksCacheValid = false;
	
	// Configuration: when true, getArticleById skips DB lookups entirely
	private boolean isFastMode = false;


}