/**
 * RDF-backed ontology repository built on top of the Sesame (OpenRDF) framework.
 */
package info.sswap.ontologies.modularity.repo.rdf;

import info.sswap.ontologies.modularity.repo.OntologyRepository;
import info.sswap.ontologies.modularity.repo.OntologyRepositoryException;
import info.sswap.ontologies.modularity.repo.RepositoryEntry;
import info.sswap.ontologies.modularity.repo.TermCloud;

import java.io.File;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.query.BindingSet;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.config.RepositoryConfigException;
import org.openrdf.repository.manager.LocalRepositoryManager;
import org.openrdf.repository.manager.RepositoryManager;

import edu.arizona.bio5.onto.decomposition.persistent.ADPersistenceException;
import edu.arizona.bio5.onto.decomposition.persistent.PersistentAtomicDecomposition;
import edu.arizona.bio5.onto.decomposition.persistent.rdf.SesameADLoader;

/**
 * Ontology repository which stores decomposed ontologies in RDF
 * 
 * @author Pavel Klinov
 *
 */
public class RDFOntologyRepository implements OntologyRepository {

	protected final static String ADMIN_DB_NAME = "admin";
	protected final static String TERM_CLOUD_DB_NAME = "term-cloud";
	//Maps term or namespace URI to the URIs of the ontologies which define it
	private Map<String, Collection<URI>> mTermIndex = new HashMap<String, Collection<URI>>();
	//Maps ontology URI to the name of the database which stores it
	private Map<URI, String> mUriDBMap = new HashMap<URI, String>();
	//Root directory for all RDF databases managed by this repository
	private final File mRoot;
	//Maps each ontology URI to its (lazily loaded) repository entry
	private Map<URI, RepositoryEntry> mUriEntryMap = new HashMap<URI, RepositoryEntry>();
	//Sesame's repository manager
	private RepositoryManager mRepoMgr = null;
	/*
	 * ***************************************
	 * SPARQL queries
	 * ***************************************
	 */
	private static final String PREFIX =	"PREFIX rdf: <" + RDF.NAMESPACE + "> \n" +
											"PREFIX rdfs: <" + RDFS.NAMESPACE + "> \n\n";
	
	private static final String GET_NAMESPACES_QUERY = PREFIX + "SELECT DISTINCT ?ns ?onto \n" +
							"WHERE {\n " +
							"?ns rdf:type <" + SSWAPRDFConstants.SSWAP_NS_TYPE + "> . \n" +
							"?ns rdfs:isDefinedBy ?onto . \n" +
							"}";
	
	private static final String GET_DATABASES_QUERY = PREFIX + "SELECT DISTINCT ?onto ?db \n" +
			"WHERE {\n " +
			"?onto rdf:type <" + OWL.ONTOLOGY + "> . \n" +
			"?onto <" + SSWAPRDFConstants.SSWAP_STORED_IN_PROPERTY + "> ?db . \n" +
			"}";
	
	/**
	 * Creates and initializes the repository rooted at the given directory.
	 * 
	 * @param rootDir Root directory for all RDF databases
	 * @throws OntologyRepositoryException if the repository cannot be initialized
	 *         (e.g. the admin database is missing or unreadable)
	 */
	public RDFOntologyRepository(File rootDir) throws OntologyRepositoryException {
		mRoot = rootDir;
		
		try {
			init();
		} catch (RepositoryException e) {
			throw new OntologyRepositoryException(e);
		} catch (RepositoryConfigException e) {
			throw new OntologyRepositoryException(e);
		} catch (QueryEvaluationException e) {
			throw new OntologyRepositoryException(e);
		}
	}
	
	/*
	 * Initializes the RDF repository: starts the Sesame repository manager,
	 * opens the admin database and loads the URI-to-DB map and the term index
	 * from it. On any failure the manager is shut down before rethrowing, so a
	 * failed instance holds no open resources.
	 */
	void init() throws RepositoryException, RepositoryConfigException, QueryEvaluationException, OntologyRepositoryException {
		RepositoryConnection adminConn = null;
		
		try {
			initRepoManager();
			
			Repository adminRepo = getRepoManager().getRepository(ADMIN_DB_NAME);

			if (adminRepo == null) {
				throw new OntologyRepositoryException("Admin repository not found");
			}

			adminConn = adminRepo.getConnection();
			// Init the mapping between ontology URIs and DB names
			loadURI2DBMap(adminConn);
			// Init the term index by reading the admin DB
			loadTermIndex(adminConn);
			adminConn.commit();
		}
		catch (OntologyRepositoryException e) {
			// Already the right exception type: clean up and rethrow without double-wrapping
			shutdownManager();
			throw e;
		}
		catch (Exception e) {
			shutdownManager();
			throw new OntologyRepositoryException(e);
		} finally {
			if (adminConn != null) adminConn.close();
		}
	}
	
	/**
	 * Shuts down the underlying Sesame repository manager. Safe to call even if
	 * initialization never completed.
	 */
	public void shutdown() {
		shutdownManager();
	}

	/*
	 * Null-safe shutdown of the repository manager. The manager may be null if
	 * initRepoManager() failed before assigning it.
	 */
	private void shutdownManager() {
		if (mRepoMgr != null) {
			mRepoMgr.shutDown();
		}
	}

	/*
	 * Loads the ontology-URI -> database-name map by querying for the
	 * following triples in the admin DB:
	 * <ontology URI> rdf:type owl:Ontology
	 * <ontology URI> sswap:storedIn "db name"^^xsd:string
	 * Mappings whose database does not actually exist are skipped (with a warning).
	 */
	private void loadURI2DBMap(RepositoryConnection adminConn) throws RepositoryException, QueryEvaluationException, RepositoryConfigException {
		TupleQuery query = getSPARQLQuery(GET_DATABASES_QUERY, adminConn);		
		TupleQueryResult results = query.evaluate();
		
		try {
			while (results.hasNext()) {
				BindingSet bindings = results.next();
				String dbName = bindings.getValue("db").stringValue();
				//Check that such database exists
				if (mRepoMgr.getRepository(dbName) != null) {
					mUriDBMap.put(URI.create(bindings.getValue("onto").stringValue()), dbName);
				}
				else {
					//TODO Replace with proper logging
					System.err.println("Database " + dbName + " not found");
				}
			}
		} finally {
			// Close the result cursor even if iteration fails part-way
			results.close();
		}
		
		//The term cloud lives in a fixed, well-known database
		mUriDBMap.put(TermCloudEntry.TERM_CLOUD_URI, TERM_CLOUD_DB_NAME);
	}

	/*
	 * Loads the term index by querying for the following triples in the admin DB:
	 * <namespace URI> rdf:type sswap:Namespace
	 * <namespace URI> rdfs:isDefinedBy <ontology URI>
	 * A namespace may be defined by more than one ontology, hence the
	 * one-to-many index.
	 */
	private void loadTermIndex(RepositoryConnection adminConn) throws RepositoryException, QueryEvaluationException {
		TupleQuery query = getSPARQLQuery(GET_NAMESPACES_QUERY, adminConn);		
		TupleQueryResult results = query.evaluate();
		
		try {
			while (results.hasNext()) {
				BindingSet bindings = results.next();
				String ns = bindings.getValue("ns").stringValue();
				Collection<URI> entryURIs = mTermIndex.get(ns);
				
				if (entryURIs == null) {
					entryURIs = new HashSet<URI>();
					mTermIndex.put(ns, entryURIs);
				}
				
				entryURIs.add(URI.create(bindings.getValue("onto").stringValue()));
			}
		} finally {
			// Close the result cursor even if iteration fails part-way
			results.close();
		}
		
		//TODO Replace with proper logging
		System.out.println("Namespaces for repo " + mRoot.getName());
		for (String ns : mTermIndex.keySet()) {
			System.out.println("Namespace " + ns + " defined in " + mTermIndex.get(ns));
		}
	}
	
	/*
	 * Prepares a SPARQL tuple query on the given connection. The query strings
	 * are compile-time constants, so a malformed query is a programming error
	 * and is rethrown unchecked.
	 */
	private TupleQuery getSPARQLQuery(String queryString, RepositoryConnection conn) throws RepositoryException {		
		try {
			return conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		} catch (MalformedQueryException e) {
			throw new RuntimeException(e);
		}
	}

	/*
	 * Creates and initializes the Sesame repository manager over the root directory.
	 */
	private void initRepoManager() throws RepositoryException, RepositoryConfigException {
		mRepoMgr = new LocalRepositoryManager(mRoot);		
		mRepoMgr.initialize();
	}
	
	private RepositoryManager getRepoManager() {
		return mRepoMgr;
	}

	/**
	 * Returns the entries for all ontologies known to this repository. Stale
	 * mappings whose entry can no longer be loaded are silently skipped.
	 */
	@Override
	public Collection<RepositoryEntry> getEntries() throws OntologyRepositoryException {
		List<RepositoryEntry> entries = new ArrayList<RepositoryEntry>();
		
		for (URI ontoURI : mUriDBMap.keySet()) {
			RepositoryEntry entry = getEntryForOntology(ontoURI);
			
			//getEntryForOntology returns null when the backing database is gone
			if (entry != null) {
				entries.add(entry);
			}
		}
		
		return entries;
	}

	/*
	 * Loads the persisted atomic decomposition of the given ontology from the
	 * named database.
	 */
	private PersistentAtomicDecomposition loadADFromRDF(String dbName, URI ontoURI) throws RepositoryException, RepositoryConfigException {		
		try {
			//connect to the right database
			Repository repo = mRepoMgr.getRepository(dbName);
			SesameADLoader loader = new SesameADLoader(repo, ontoURI.toString());
			
			return loader.load();
		} catch (ADPersistenceException e) {
			throw new RepositoryException(e);
		}
	}

	/**
	 * Returns the repository entry for the given ontology, loading and caching
	 * it on first access.
	 * 
	 * @return the entry, or {@code null} if the ontology is unknown or its
	 *         backing database no longer exists
	 */
	@Override
	public RepositoryEntry getEntryForOntology(URI ontologyURI) throws OntologyRepositoryException {
		RepositoryEntry entry = mUriEntryMap.get(ontologyURI);

		if (entry == null) {
			String dbName = mUriDBMap.get(ontologyURI);
			
			try {
				if (dbName == null || !mRepoMgr.getRepositoryIDs().contains(dbName)) {
					//No such entry
					System.err.println("No repository entry found for " + ontologyURI);
					
					return null;
				}
				
				PersistentAtomicDecomposition ad = loadADFromRDF(dbName, ontologyURI);
				
				//The term cloud is stored under a reserved URI and gets a special entry type
				entry = ontologyURI.equals(TermCloudEntry.TERM_CLOUD_URI) ? new TermCloudEntry(ad) : new RDFADEntry(ad);
				mUriEntryMap.put(ontologyURI, entry);
				
			} catch (RepositoryException e) {
				throw new OntologyRepositoryException(e);
			} catch (RepositoryConfigException e) {
				throw new OntologyRepositoryException(e);
			}
		}

		return entry;
	}

	/**
	 * Returns the entries for all ontologies which define the given term (or
	 * namespace) URI. Returns an empty collection if the term is unknown.
	 */
	@Override
	public Collection<RepositoryEntry> getEntriesForTerm(URI termURI) throws OntologyRepositoryException {
		Collection<URI> entryURIs = mTermIndex.get(termURI.toString());
		
		if (entryURIs == null) {
			return Collections.emptySet();
		}
		
		Collection<RepositoryEntry> entries = new ArrayList<RepositoryEntry>();
		
		for (URI entryURI : entryURIs) {
			RepositoryEntry entry = getEntryForOntology(entryURI);
			
			//Skip stale index entries whose ontology can no longer be loaded
			if (entry != null) {
				entries.add(entry);
			}
		}
		
		return entries;
	}

	/**
	 * Returns the term cloud, which is stored as a regular repository entry
	 * under a reserved URI.
	 */
	@Override
	public TermCloud getTermCloud() throws OntologyRepositoryException {
		return (TermCloud) getEntryForOntology(TermCloudEntry.TERM_CLOUD_URI);
	}
}