/*
 * Copyright 2011-2013, David George, Licensed under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.magneato.service;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.apache.commons.lang.WordUtils;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.SearcherWarmer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.util.Version;
import org.magneato.dao.KVStore;
import org.magneato.dao.KVStoreFactory;
import org.magneato.dto.FacetItem;
import org.magneato.dto.KVResults;
import org.magneato.dto.Page;
import org.magneato.dto.PageACL;
import org.magneato.dto.Result;
import org.magneato.dto.SearchResults;
import org.magneato.infra.NameFormatException;
import org.magneato.utils.RepositoryException;
import org.magneato.utils.Template;
import org.magneato.utils.parsers.FacetIndexerParser;
import org.magneato.utils.parsers.WikiParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.security.access.annotation.Secured;
import org.springframework.stereotype.Service;
import org.xml.sax.InputSource;

import com.browseengine.bobo.api.BoboBrowser;
import com.browseengine.bobo.api.BoboIndexReader;
import com.browseengine.bobo.api.Browsable;
import com.browseengine.bobo.api.BrowseFacet;
import com.browseengine.bobo.api.BrowseHit;
import com.browseengine.bobo.api.BrowseRequest;
import com.browseengine.bobo.api.BrowseResult;
import com.browseengine.bobo.api.BrowseSelection;
import com.browseengine.bobo.api.FacetAccessible;
import com.browseengine.bobo.api.FacetSpec;
import com.browseengine.bobo.api.FacetSpec.FacetSortSpec;
import com.browseengine.bobo.facets.FacetHandler;
import com.browseengine.bobo.facets.impl.SimpleFacetHandler;

/**
 * Service layer for the KVStore/Lucene indexer to enable Magneato Pages to
 * provide services such as faceted browsing of content, searching etc.
 * 
 * @author David George
 */
@Service("Repository")
public class Repository {
	// root directory holding the Lucene index and per-namespace KV stores
	@Value("${repo.dir}")
	private String repoDir;
	// default permission bits for new pages (see getPageACL)
	@Value("${umask}")
	private long umask;
	// default page owner, also used when a page's author no longer exists
	@Value("${uowner}")
	private String uowner;
	// default page group
	@Value("${ugroup}")
	private String ugroup;
	// passed to KVStore.init — presumably caps comments per page; TODO confirm
	@Value("${comments.max}")
	private int maxComments;

	// lazily-populated map of namespace -> KV store, built by getStore()
	private HashMap<String, KVStore<Page>> storeMap;
	@Autowired
	private KVStoreFactory<Page> storeFactory;
	@Autowired
	private TemplateManager templateMgr;
	@Autowired
	private UserManager userManager;

	// shared Lucene writer/searcher pair, created in initialize()
	private SearcherManager searchManager;
	private IndexWriter indexWriter;

	private static final Logger _logger = LoggerFactory
			.getLogger(Repository.class);

	// analyzer shared by both indexing and query parsing
	StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_35);

	/**
	 * Opens the global Lucene index under {@code repo.dir} and wires up the
	 * shared IndexWriter/SearcherManager pair. KV stores themselves are
	 * created lazily, per namespace, on first use.
	 *
	 * TODO the index is global so we can search across namespaces for
	 * references (UUIDs); facets may clash across namespaces unless we
	 * prefix them with the namespace.
	 *
	 * @throws CorruptIndexException
	 * @throws LockObtainFailedException
	 * @throws IOException
	 */
	@PostConstruct
	public void initialize() throws CorruptIndexException,
			LockObtainFailedException, IOException {
		storeMap = new HashMap<String, KVStore<Page>>();

		Directory indexDir = new NIOFSDirectory(new File(repoDir));
		IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_35,
				analyzer);
		indexWriter = new IndexWriter(indexDir, config);
		// an initial commit creates the segment files IndexReader.open() needs
		indexWriter.commit();

		// applyAllDeletes = true; no executor, so warming runs inline
		ExecutorService warmExecutor = null;
		searchManager = new SearcherManager(indexWriter, true,
				new MySearchWarmer(), warmExecutor);
	}

	/**
	 * Returns the KV store backing the given namespace, creating and
	 * initialising it on first use.
	 *
	 * @param nameSpace
	 *            namespace whose store is wanted
	 * @return the (possibly newly created) store
	 * @throws RepositoryException
	 *             if the store cannot be created
	 */
	private KVStore<Page> getStore(String nameSpace) {
		// NOTE(review): storeMap is a plain HashMap; a concurrent read during
		// a put is still formally unsafe — consider ConcurrentHashMap.
		KVStore<Page> store = storeMap.get(nameSpace);
		if (store != null) {
			return store;
		}

		// check if template dir exists for namespace
		templateMgr.getTemplateGroup(nameSpace);
		try {
			synchronized (storeFactory) {
				// re-check under the lock: another thread may have created
				// the store between the unsynchronized lookup and here
				store = storeMap.get(nameSpace);
				if (store == null) {
					store = storeFactory.getInstance();
					store.init(repoDir + "/" + nameSpace + "/treap",
							maxComments, new PagePKHelper());
					// publish only after init() so no reader can observe a
					// half-initialised store
					storeMap.put(nameSpace, store);
				}
			}
		} catch (Exception e) {
			// log with stack trace so the root cause is not lost
			_logger.error("Can't create repo for namespace " + nameSpace
					+ " because " + e.getLocalizedMessage(), e);
			throw new RepositoryException(
					"Can't create repo for namespace " + nameSpace
							+ " because " + e.getLocalizedMessage());
		}

		return store;
	}

	/**
	 * Get the Access Control List for a page. If uri is null/empty — or the
	 * page cannot be found in the store — the configured defaults
	 * (umask/uowner/ugroup) are returned instead of failing.
	 *
	 * @param uri
	 *            full page uri, may be null
	 * @return a populated PageACL, never null
	 */
	public PageACL getPageACL(String uri) {
		PageACL acl = new PageACL();
		Page p = (uri == null || uri.isEmpty()) ? null : get(uri);
		if (p == null) {
			// no page to inherit from (missing uri or missing page):
			// previously a missing page caused an NPE here
			acl.setPermissions(umask);
			acl.setOwner(uowner);
			acl.setGroup(ugroup);
		} else {
			acl.setPermissions(p.getPerms());
			// TODO if we delete a user, then someone recreates that user we
			// have a problem as they will own all the old user's pages.
			try {
				String userName = userManager.getUserName(p.getAuthor());
				acl.setOwner(userName);
			} catch (EmptyResultDataAccessException e) {
				_logger.warn("Screenname " + p.getAuthor()
						+ " no longer in database");
				// TODO - need to set to something if the user is deleted?
				acl.setOwner(uowner);
			}
			acl.setGroup(p.getGroup());
		}
		return acl;
	}

	/**
	 * Secured version of the repository get command. User requires Create
	 * privilege on page to retrieve object.
	 * 
	 * @param parent uri of the prospective parent page
	 * @return the parent Page, or null if parent is null/empty
	 */
	@Secured({ "ROLE_ADMIN", "ACL_FILE_CREATE" })
	public Page getParentForCreate(String parent) {
		return get(parent);
	}

	/**
	 * Secured get for editing: requires write privilege on the page.
	 * Re-fetches the page by name so the caller sees the stored state.
	 */
	@Secured({ "ROLE_ADMIN", "ACL_FILE_WRITE" })
	public Page getForEdit(Page page) {
		return get(page.getName());
	}

	/** Secured get restricted to administrators. */
	@Secured({ "ROLE_ADMIN" })
	public Page getForAdmin(String uri) {
		return get(uri);
	}

	/**
	 * Create a new page in the store. The Page may link to an existing page, in
	 * which case we need create permissions on the parent or it may simply be
	 * an orphan which can be found be facet navigation
	 * 
	 * @param parent uri of the parent page — used only for the ACL check,
	 *            not referenced in the body
	 * @param page the page to index and persist
	 */
	@Secured({ "ROLE_ADMIN", "ACL_FILE_CREATE" })
	public void createDocument(String parent, Page page) {
		addDocument(page);
	}

	/**
	 * Fetch a page by its full URI; the namespace part selects the KV store
	 * and the short name is the key within it.
	 * 
	 * @param uri full page uri
	 * @return the Page, or null when the uri is null or empty
	 */
	public Page get(String uri) {
		if (uri == null || uri.isEmpty()) {
			return null;
		}
		KVStore<Page> store = getStore(Page.getNameSpace(uri));
		return store.get(Page.getShortName(uri));
	}

	/**
	 * Look up the edit template recorded for a page.
	 *
	 * @param uri
	 *            full page uri
	 * @return the edit template name, or null when the page does not exist
	 */
	public String getEditTemplate(String uri) {
		String namespace = Page.getNameSpace(uri);
		String name = Page.getShortName(uri);
		KVStore<Page> store = getStore(namespace);

		Page p = store.get(name);
		if (p == null) {
			// previously this NPE'd when the page was missing from the store
			_logger.warn("getEditTemplate: no page found for " + uri);
			return null;
		}
		return p.getEditTemplate();
	}

	/**
	 * for optimum performance reuse the document and field values for each
	 * document
	 * 
	 * Indexes the page in Lucene, commits, then persists it into the
	 * namespace's KV store.
	 * 
	 * @param page page to index and persist
	 * @throws CorruptIndexException
	 * @throws IOException
	 */
	@Secured({ "ROLE_ADMIN", "ACL_FILE_WRITE" })
	public void addDocument(Page page) {
		// NOTE(review): the index is committed before store.put; if the put
		// fails the index keeps a phantom entry for this page — confirm
		// whether that is acceptable or the order should be swapped.
		indexDocument(page);
		indexCommit();// commit changes
		String name = page.getShortName();
		String namespace = page.getNameSpace();
		KVStore<Page> store = getStore(namespace);
		_logger.debug("Adding document " + name + " into namespace "
				+ namespace);
		try {
			store.put(name, page);
		} catch (NullPointerException e) {
			// NOTE(review): catching NPE presumably guards against an
			// uninitialised store — TODO confirm; failing fast is usually
			// preferable to catching NullPointerException.
			_logger.error("Can't create repo for namespace " + namespace
					+ " name " + name + " because " + e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}
	}

	/**
	 * Change the page name in the store. Store may not support rename
	 * functionality so we add and index a new object then delete the old
	 * object. Page stays in same namespace.
	 * 
	 * TODO: Need to rename all related docs too.
	 * 
	 * @param oldUri
	 *            Old name in store
	 * @param newName
	 *            New name (short name, without namespace prefix)
	 * @return The page object, carrying its new name
	 * @throws RepositoryException
	 *             if either name is invalid or the add/delete step fails
	 */
	@Secured({ "ROLE_ADMIN" })
	public Page rename(String oldUri, String newName) {
		if (oldUri == null || oldUri.isEmpty() || newName == null
				|| newName.isEmpty()) {
			throw new RepositoryException("The page name invalid " + oldUri
					+ " " + newName);
		}

		String nameSpace = Page.getNameSpace(oldUri);
		String name = Page.getShortName(oldUri);

		KVStore<Page> store = getStore(nameSpace);
		Page p = store.get(name);
		// root pages carry no namespace prefix in their full name
		if (Page.ROOT.equals(nameSpace)) {
			p.setName("/" + newName);
		} else {
			p.setName("/" + nameSpace + "/" + newName);
		}

		// add the renamed page first, so a failure here leaves the old page
		// untouched
		try {
			addDocument(p);
		} catch (Exception e) {
			_logger.error("Unable to add new page during rename because "
					+ e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}

		try {
			indexWriter.deleteDocuments(new Term("name", oldUri));
			indexCommit();
			store.delete(name);
		} catch (Exception e) {
			// message previously read "Unable to add delete old page"
			_logger.error("Unable to delete old page during rename because "
					+ e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}

		return p;
	}

	/**
	 * Delete a page (or comment) from both the search index and the KV
	 * store. The index entry is removed first; if that fails the store
	 * entry is left intact and a RepositoryException is thrown.
	 * 
	 * @param uri full uri of the page or comment to remove
	 * @return Name of page or comment that has been deleted
	 * @throws RepositoryException wrapping CorruptIndexException/IOException
	 *             from the index update
	 */
	@Secured({ "ROLE_ADMIN", "ACL_FILE_DELETE" })
	public String delete(String uri) {
		if (uri == null || uri.isEmpty()) {
			throw new RepositoryException("page name invalid");
		}

		String namespace = Page.getNameSpace(uri);
		String name = Page.getShortName(uri);
		KVStore<Page> store = getStore(namespace);
		// delete document from search
		_logger.debug("deleting document" + uri);
		try {
			indexWriter.deleteDocuments(new Term("name", uri));
			indexCommit();
		} catch (Exception e) {
			_logger.error(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}
		// store delete runs only after the index update succeeded
		return store.delete(name);
	}

	/**
	 * Adds a document to the Lucene search engine indexer.
	 * 
	 * Always-present fields: name (exact match), title (analyzed), uuid,
	 * namespace, createDate. Pages with an edit template also get facet
	 * fields parsed out of their XML content; pages without one are treated
	 * as comments and have their raw content indexed.
	 * 
	 * Any existing document with the same name is deleted first, so this
	 * acts as an upsert.
	 * 
	 * @param page page to (re-)index
	 * @throws RepositoryException if facet parsing or the index update fails
	 */
	private void indexDocument(Page page) {
		Document doc = new Document();
		String namespace = page.getNameSpace();

		// exact-match key used for delete-before-add below
		doc.add(new Field("name", page.getName(), Field.Store.YES,
				Field.Index.NOT_ANALYZED));
		doc.add(new Field("title", page.getTitle(), Field.Store.YES,
				Field.Index.ANALYZED));

		doc.add(new Field("uuid", page.getUuid(), Field.Store.NO,
				Field.Index.ANALYZED));

		// one "relation" field per related page, searchable but not stored
		ArrayList<String> relations = page.getRelations();
		if (relations != null) {
			for (String relation : relations) {
				doc.add(new Field("relation", relation, Field.Store.NO,
						Field.Index.ANALYZED));
			}
		}

		// relative to namespace, what do we do if no facet data?
		if (page.getEditTemplate() != null) {
			doc.add(new Field("template", page.getEditTemplate(),
					Field.Store.YES, Field.Index.NOT_ANALYZED));
		}

		doc.add(new Field("namespace", namespace, Field.Store.YES,
				Field.Index.NOT_ANALYZED));

		// create date stored with second precision (millis / 1000)
		NumericField createDate = new NumericField("createDate",
				Field.Store.YES, true);
		createDate.setLongValue((page.getCreateDate() / 1000));

		if (page.getEditTemplate() == null) {
			// it is a comment
			doc.add(new Field("contents", WikiParser.removeAccents(page
					.getContent()), Field.Store.NO, Field.Index.ANALYZED));
		} else {
			try {
				// NOTE(review): SAX parser runs with default settings; if
				// page content can ever carry a DTD, consider disabling
				// external entities (XXE hardening).
				SAXParserFactory factory = SAXParserFactory.newInstance();
				SAXParser saxParser = factory.newSAXParser();
				FacetIndexerParser facetParser = new FacetIndexerParser();

				InputSource inStream = new InputSource();
				inStream.setCharacterStream(new StringReader(page.getContent()));

				saxParser.parse(inStream, facetParser);

				doc.add(new Field("contents", WikiParser
						.removeAccents(facetParser.getContents()),
						Field.Store.NO, Field.Index.ANALYZED));

				// every non-empty facet becomes an exact-match stored field
				for (Entry<String, String> e : facetParser.getFacets()) {
					if (!e.getValue().isEmpty()) {
						doc.add(new Field(e.getKey(), e.getValue(),
								Field.Store.YES, Field.Index.NOT_ANALYZED));

						_logger.debug("adding facet " + e.getKey() + ":"
								+ e.getValue());
					}
				}// for

				// override page create date
				// NOTE(review): assumes getDate() already returns seconds
				// (no /1000 here, unlike page.getCreateDate()) — TODO confirm
				long date = facetParser.getDate();
				if (date != -1) {
					createDate.setLongValue((date));
				}

			} catch (Exception e) {
				_logger.error("Problem parsing facet data for indexing -"
						+ page.getContent() + "- because " + e.getMessage());
				throw new RepositoryException(e.getLocalizedMessage());
			}
		}
		// store create date with second precision
		doc.add(createDate);

		try {
			_logger.debug("# docs in index " + indexWriter.numDocs());

			// Important, delete document first
			indexWriter.deleteDocuments(new Term("name", page.getName()));
			indexWriter.addDocument(doc);
			_logger.debug("# docs in index " + indexWriter.numDocs());
		} catch (Exception e) {
			_logger.error(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}
	}

	/**
	 * Commit pending changes and suggest to the SearchManager that it might
	 * like to switch to the new index.
	 *
	 * @throws RepositoryException
	 *             if the commit or reopen fails — matching the error
	 *             convention used throughout this service
	 */
	private void indexCommit() {
		try {
			indexWriter.commit();
			searchManager.maybeReopen();
		} catch (IOException e) {
			// CorruptIndexException is an IOException subclass; previously
			// both were swallowed with printStackTrace(), hiding possible
			// index corruption from callers entirely
			_logger.error("index commit/reopen failed", e);
			throw new RepositoryException(e.getLocalizedMessage());
		}
	}

	/**
	 * Faceted navigation
	 * 
	 * This works on a per template type basis. For each template we have a set
	 * of 0 or more facets. We can refine the results based on the facets
	 * already selected supplied in the facetMap until we have consumed all the
	 * facets or the user has found the result they are looking for.
	 * 
	 * @param templateName
	 *            Template name for facets
	 * @param nameSpace
	 *            Namespace to search
	 * @param map
	 *            Map of all request parameters; a facet present here becomes
	 *            a selection (filter), an absent facet is returned as
	 *            drill-down counts
	 * @param start
	 *            Page to start searching from
	 * @param count
	 *            Number of items to return
	 * @return Search results, newest first
	 * @throws RepositoryException if the browse fails
	 */
	public SearchResults facetNavigation(String templateName, String nameSpace,
			Map<String, String[]> map, int start, int count) {
		assert templateName != null;
		assert count > 0;

		Browsable browser = null;
		IndexSearcher searcher = null;
		SearchResults searchResults = null;
		try {
			searcher = searchManager.acquire();

			Template t = templateMgr.getTemplateGroup(nameSpace).getTemplate(
					templateName);
			List<String> facetList = t.getFacets();
			// need to add facet handler for all isFacet attributes
			List<FacetHandler<?>> handlers = new ArrayList<FacetHandler<?>>();
			for (String fieldName : facetList) {
				handlers.add(new SimpleFacetHandler(fieldName));
			}
			// add the template and namespace
			handlers.add(new SimpleFacetHandler("template"));
			handlers.add(new SimpleFacetHandler("namespace"));
			BoboIndexReader boboReader = BoboIndexReader.getInstance(
					searcher.getIndexReader(), handlers);

			BrowseRequest br = new BrowseRequest();
			br.setCount(count);
			br.setOffset(start * count);
			// newest first
			br.addSortField(new SortField("createDate", SortField.LONG, true));

			// go through facet list
			// BrowseSelection: A selection or filter to be applied, e.g.
			// Activity=Skiing
			// queryString records the applied selections for the caller
			StringBuilder queryString = new StringBuilder();
			for (String fieldName : facetList) {
				if (!map.containsKey(fieldName)) {
					// unselected facet: ask for its value counts instead
					FacetSpec facetSpec = new FacetSpec();
					facetSpec.setMinHitCount(1);
					facetSpec.setOrderBy(FacetSortSpec.OrderHitsDesc);
					br.setFacetSpec(fieldName, facetSpec);
				} else {
					// selected facet: filter on the first supplied value only
					BrowseSelection sel = new BrowseSelection(fieldName);
					String value = map.get(fieldName)[0];
					sel.addValue(value);
					br.addSelection(sel);
					queryString.append(fieldName);
					queryString.append("=");
					queryString.append(value);
					queryString.append("&");
				}
			}
			// add template and namespace
			BrowseSelection sel1 = new BrowseSelection("template");
			sel1.addValue(templateName);
			br.addSelection(sel1);
			queryString.append("template=");
			queryString.append(templateName);
			queryString.append("&");
			BrowseSelection sel2 = new BrowseSelection("namespace");
			sel2.addValue(nameSpace);
			br.addSelection(sel2);
			queryString.append("namespace=");
			queryString.append(nameSpace);
			// perform browse
			// NOTE(review): unlike getFacetValues, the browser is never
			// closed here — confirm whether closing it would also close the
			// SearcherManager's shared reader before adding a close().
			browser = new BoboBrowser(boboReader);
			BrowseResult result;

			result = browser.browse(br);

			// search query result
			int totalHits = result.getNumHits();
			searchResults = new SearchResults(start, count, totalHits,
					queryString.toString());
			BrowseHit[] nhits = result.getHits();
			for (int i = 0; i < nhits.length; i++) {
				Document doc = searcher.getIndexReader().document(
						nhits[i].getDocid());
				Result r = new Result(doc.get("name"));
				r.setTitle(doc.get("title"));
				r.setDate(doc.get("createDate"));
				searchResults.addResult(r);
			}

			// copy the drill-down counts into the result DTO
			for (Map.Entry<String, FacetAccessible> entry : result
					.getFacetMap().entrySet()) {
				FacetItem facetItem = new FacetItem(entry.getKey());
				searchResults.addFacetItem(facetItem);

				FacetAccessible fa = entry.getValue();
				List<BrowseFacet> bf = fa.getFacets();
				for (BrowseFacet browseFacet : bf) {
					facetItem.addFacet(browseFacet.getValue(),
							browseFacet.getHitCount());
				}
			}// for

		} catch (Exception e) {
			_logger.debug(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		} finally {
			if (searcher != null) {
				try {
					searchManager.release(searcher);
				} catch (IOException e) {
					_logger.error(e.getMessage());
				}
				searcher = null;
			}
		}

		return searchResults;
	}

	/**
	 * Type-ahead helper: return up to 10 distinct values of the given facet
	 * (restricted to documents using the given template) that start with
	 * searchTerm. Values are capitalised and sorted.
	 *
	 * @param facetName
	 *            facet field to collect values from
	 * @param template
	 *            edit template the documents must use
	 * @param searchTerm
	 *            prefix typed by the user
	 * @return sorted, capitalised facet values (at most 10)
	 * @throws IOException
	 *             if the searcher cannot be acquired or the browser closed
	 * @throws ParseException
	 *             if the prefix query cannot be parsed
	 * @throws RepositoryException
	 *             if the browse itself fails
	 */
	public TreeSet<String> getFacetValues(String facetName, String template,
			String searchTerm) throws IOException, ParseException {
		IndexSearcher searcher = searchManager.acquire();
		try {
			SimpleFacetHandler categoryHandler = new SimpleFacetHandler(
					facetName);
			List<FacetHandler<?>> handlerList = Arrays
					.asList(new FacetHandler<?>[] { categoryHandler,
							new SimpleFacetHandler("template") });

			// BoboIndexReader: A Lucene IndexReader containing a List of
			// FacetHandlers.
			BoboIndexReader boboReader = BoboIndexReader.getInstance(
					searcher.getIndexReader(), handlerList);

			BrowseRequest br = new BrowseRequest();

			// prefix query, e.g. "ski" -> "ski*"; KeywordAnalyzer and
			// lowercasing disabled keep the term exactly as indexed
			QueryParser parser = new QueryParser(Version.LUCENE_35, facetName,
					new KeywordAnalyzer());
			parser.setLowercaseExpandedTerms(false);
			br.setQuery(parser.parse(searchTerm + "*"));

			br.setCount(10);
			br.setOffset(0);

			BrowseSelection sel1 = new BrowseSelection("template");
			sel1.addValue(template);
			br.addSelection(sel1);

			BrowseSelection sel2 = new BrowseSelection(facetName);
			br.addSelection(sel2);

			// add the facet output specs
			FacetSpec categorySpec = new FacetSpec();
			categorySpec.setMinHitCount(1);
			categorySpec.setOrderBy(FacetSortSpec.OrderHitsDesc);
			br.setFacetSpec(facetName, categorySpec);

			// perform browse
			Browsable browser = new BoboBrowser(boboReader);
			TreeSet<String> values = new TreeSet<String>();
			try {
				BrowseResult result = browser.browse(br);

				FacetAccessible facets = result.getFacetMap().get(facetName);
				if (facets != null) {
					for (BrowseFacet bf : facets.getFacets()) {
						values.add(WordUtils.capitalize(bf.getValue()));
					}
				}
			} catch (Exception e) {
				_logger.debug(e.getLocalizedMessage());
				throw new RepositoryException(e.getLocalizedMessage());
			} finally {
				browser.close();
			}
			return values;
		} finally {
			// previously the searcher leaked whenever browse() threw (the
			// release code sat after the catch that rethrows); always hand
			// the searcher back to the SearcherManager
			try {
				searchManager.release(searcher);
			} catch (IOException e) {
				_logger.error(e.getMessage());
			}
		}
	}

	/**
	 * Search the repository using Lucene query syntax, newest pages first.
	 * Results are paged: page {@code start} (0-based), {@code count} hits
	 * per page.
	 * 
	 * ToDO: order by arbitary field or even facet value
	 * 
	 * @param q
	 *            the query string, searched against the "contents" field
	 * @param start
	 *            starting page to fetch results
	 * @param count
	 *            number of results to return
	 * @return list of pages matching the requested slice
	 */
	public SearchResults search(String q, int start, int count) {
		IndexSearcher searcher = null;
		SearchResults results = null;

		try {
			// or multifield query parser
			Query query = new QueryParser(Version.LUCENE_35, "contents",
					analyzer).parse(q);

			searcher = searchManager.acquire();

			// newest first; fetch everything up to the end of the slice
			// TODO, upgrade Lucene and use searchAfter for paging
			Sort byDate = new Sort(new SortField("createDate", SortField.LONG,
					true));
			int sliceEnd = (start + 1) * count;
			TopDocs hits = searcher.search(query, sliceEnd, byDate);

			results = new SearchResults(start, count, hits.totalHits, q);
			int last = Math.min(sliceEnd, hits.scoreDocs.length);
			for (int i = start * count; i < last; i++) {
				Document document = searcher.doc(hits.scoreDocs[i].doc);

				Result result = new Result(document.get("name"));
				result.setTitle(document.get("title"));
				result.setDate(document.get("createDate"));
				results.addResult(result);
			}
		} catch (Exception e) {
			_logger.debug(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		} finally {
			if (searcher != null) {
				try {
					searchManager.release(searcher);
				} catch (IOException e) {
					_logger.error(e.getLocalizedMessage());
				}
				searcher = null; // make sure it can't be used again
			}
		}
		return results;
	}

	/**
	 * Should just reindex all namespaces or search for docs in namespace.
	 * 
	 * Wipes the whole Lucene index, then re-exports every page of the given
	 * namespace through indexDocument, committing once at the end.
	 * 
	 * NOTE(review): deleteAll() empties the GLOBAL index but only this one
	 * namespace is re-exported — pages in other namespaces vanish from
	 * search until they are reindexed too. Confirm this is intended.
	 * 
	 * @param nameSpace namespace whose pages are re-exported
	 */
	@Secured({ "ROLE_ADMIN" })
	public void reindex(String nameSpace) {
		try {
			indexWriter.deleteAll();
		} catch (IOException e) {
			_logger.debug(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}
		KVStore<Page> store = getStore(nameSpace);
		// NOTE(review): result of titles(1, 10) is discarded — looks like a
		// leftover debug call; confirm whether it has a needed side effect.
		store.titles(1, 10);

		// export() visits every page; toXML is (ab)used as the visitor
		store.export(((new ExportHelper() {
			public String toXML(Object o) {
				Page p = (Page) o;

				indexDocument(p);
				return p.getName();
			}
		})));
		indexCommit();
	}

	/**
	 * Rewrite the full name of every page in a namespace to point at
	 * newNameSpace, updating the index entry for each page.
	 * 
	 * NOTE(review): pages are re-put into the store of the OLD namespace
	 * under the same short name — only the name field and the index change;
	 * they are not moved into newNameSpace's store. Confirm intended.
	 * 
	 * @param nameSpace existing namespace to migrate
	 * @param newNameSpace namespace to write into the page names
	 */
	@Secured({ "ROLE_ADMIN" })
	public void changeNamespace(final String nameSpace,
			final String newNameSpace) {
		final KVStore<Page> store = getStore(nameSpace);

		// export() visits every page; toXML is used as the visitor callback
		store.export(((new ExportHelper() {
			public String toXML(Object o) {
				Page p = (Page) o;

				String oldUri = p.getName();
				p.setName("/" + newNameSpace + "/" + p.getShortName());
				store.put(p.getShortName(), p);
				try {
					// drop the stale index entry, index under the new name
					indexWriter.deleteDocuments(new Term("name", oldUri));
					indexDocument(p);
				} catch (Exception e) {
					_logger.debug(e.getLocalizedMessage());
					throw new RepositoryException(e.getLocalizedMessage());
				}

				return p.getName();
			}
		})));
		indexCommit();

	}

	/**
	 * Back up every page of a namespace to {@code <nameSpace>-backup.xml}
	 * as a sequence of JAXB fragments wrapped in a single &lt;pages&gt;
	 * element (readable again via loadFromXML).
	 *
	 * @param nameSpace
	 *            namespace to export
	 * @throws RepositoryException
	 *             if the marshalling or file write fails
	 */
	@Secured({ "ROLE_ADMIN" })
	public void saveAsXML(String nameSpace) {
		// JAXB

		final Marshaller marshaller;
		try {
			final JAXBContext context = JAXBContext.newInstance(Page.class);

			marshaller = context.createMarshaller();
			marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT,
					Boolean.TRUE);
			marshaller.setProperty(Marshaller.JAXB_ENCODING, "UTF-8");
			// fragments: we write the XML declaration and <pages> wrapper
			// ourselves
			marshaller.setProperty(Marshaller.JAXB_FRAGMENT, Boolean.TRUE);

			final OutputStreamWriter os = new OutputStreamWriter(
					new FileOutputStream(nameSpace + "-backup.xml"), "UTF-8");
			try {
				os.write("<?xml version='1.0' encoding='UTF-8' standalone='yes'?><pages>");

				KVStore<Page> store = getStore(nameSpace);
				store.titles(1, 10);
				store.export(new ExportHelper() {
					public String toXML(Object o) {
						Page p = (Page) o;

						try {
							marshaller.marshal(p, os);
						} catch (JAXBException e) {
							_logger.debug(e.getLocalizedMessage());
							throw new RepositoryException(e.getLocalizedMessage());
						}
						return p.getName();
					}
				});
				os.write("</pages>");
			} finally {
				// previously the stream leaked whenever export/marshal threw
				os.close();
			}
		} catch (Exception e) {
			_logger.debug(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		}
	}

	/**
	 * Restore a namespace from {@code <nameSpace>-backup.xml}, putting every
	 * unmarshalled page back into the namespace's KV store. Counterpart of
	 * saveAsXML. Note: the index is NOT updated here — run reindex after.
	 *
	 * @param nameSpace
	 *            namespace to restore into
	 * @throws RepositoryException
	 *             if the file cannot be read or unmarshalled
	 */
	@Secured({ "ROLE_ADMIN" })
	public void loadFromXML(String nameSpace) {
		final KVStore<Page> store = getStore(nameSpace);
		InputStream is = null;
		try {
			is = new FileInputStream(nameSpace + "-backup.xml");

			ObjectUnmarshaller<Page> oum = new ObjectUnmarshaller<Page>(is,
					Page.class);
			Page page;
			while ((page = oum.next()) != null) {
				store.put(page.getShortName(), page);
			}
			oum.close();
		} catch (Exception e) {
			_logger.debug(e.getLocalizedMessage());
			throw new RepositoryException(e.getLocalizedMessage());
		} finally {
			try {
				if (is != null) {
					is.close();
				}
			} catch (IOException e) {
				// close is best-effort; log instead of printStackTrace()
				_logger.warn("Unable to close backup file for " + nameSpace, e);
			}
		}

	}

	/**
	 * Attach a comment to its parent page via the store's "related" list.
	 *
	 * @param page the comment; its name/namespace identify the parent page
	 * @return the value reported by the store for the added comment
	 * @throws RepositoryException if the store rejects the comment
	 */
	public int addComment(Page page) {
		String namespace = page.getNameSpace();
		String name = page.getShortName();
		KVStore<Page> store = getStore(namespace);
		_logger.debug("Adding comment " + name + " into namespace "
				+ namespace);
		try {
			return store.addRelated(name, page);
		} catch (Exception e) {
			String reason = e.getLocalizedMessage();
			_logger.debug(reason);
			throw new RepositoryException(reason);
		}
	}

	/**
	 * Fetch one page of comments attached to the given page.
	 *
	 * @param uri full uri of the parent page
	 * @param page comment page number to fetch
	 * @param count comments per page
	 * @return the requested slice of related comments
	 * @throws RepositoryException if the store lookup fails
	 */
	public KVResults<Page> getComments(String uri, int page, int count) {
		String namespace = Page.getNameSpace(uri);
		String name = Page.getShortName(uri);
		KVStore<Page> store = getStore(namespace);
		_logger.debug("Getting comments for " + name + " from namespace "
				+ namespace);
		try {
			return store.getRelated(name, page, count);
		} catch (Exception e) {
			String reason = e.getLocalizedMessage();
			_logger.debug(reason);
			throw new RepositoryException(reason);
		}
	}

	/**
	 * Shut the repository down: close the index writer, the searcher
	 * manager and every per-namespace KV store. Each resource is closed
	 * independently so one failure no longer leaks the others (previously
	 * a writer failure aborted the method before anything else closed).
	 *
	 * @throws RepositoryException
	 *             after all close attempts, if any of them failed
	 */
	@PreDestroy
	public void close() {
		Exception firstFailure = null;

		try {
			if (indexWriter != null) {
				indexWriter.close();
			}
		} catch (Exception e) {
			_logger.error("Failed to close index writer", e);
			firstFailure = e;
		}

		try {
			if (searchManager != null) {
				searchManager.close();
			}
		} catch (Exception e) {
			_logger.error("Failed to close search manager", e);
			if (firstFailure == null) {
				firstFailure = e;
			}
		}

		if (storeMap != null) {
			for (KVStore<Page> store : storeMap.values()) {
				try {
					store.close();
				} catch (Exception e) {
					_logger.error("Failed to close KV store", e);
					if (firstFailure == null) {
						firstFailure = e;
					}
				}
			}
		}

		if (firstFailure != null) {
			throw new RepositoryException(firstFailure.getLocalizedMessage());
		}
	}
}

/**
 * Warmer handed to the SearcherManager; invoked before a freshly reopened
 * searcher is published. Currently a deliberate no-op.
 */
class MySearchWarmer implements SearcherWarmer {
	@Override
	public void warm(IndexSearcher searcher) throws IOException {
		// Run some diverse searches, searching and sorting against all
		// fields that are used by your application
	}
}
