/*
 * The contents of this file are subject to the Mozilla Public License
 * Version 1.1 (the "License");  you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
 * the specific language governing rights and limitations under the License.
 *
 * The Original Code is ART Ontology API.
 *
 * The Initial Developer of the Original Code is University of Roma Tor Vergata.
 * Portions created by University of Roma Tor Vergata are Copyright (C) 2007.
 * All Rights Reserved.
 *
 * ART Ontology API was developed by the Artificial Intelligence Research Group
 * (art.uniroma2.it) at the University of Roma Tor Vergata
 * Current information about the ART Ontology API can be obtained at 
 * http://art.uniroma2.it/owlart
 *
 */

/*
 * Contributor(s): Armando Stellato stellato@info.uniroma2.it
 */
package it.uniroma2.art.owlart.utilities;

import it.uniroma2.art.owlart.exceptions.ModelAccessException;
import it.uniroma2.art.owlart.exceptions.ModelUpdateException;
import it.uniroma2.art.owlart.model.ARTBNode;
import it.uniroma2.art.owlart.model.ARTNode;
import it.uniroma2.art.owlart.model.ARTResource;
import it.uniroma2.art.owlart.model.ARTStatement;
import it.uniroma2.art.owlart.model.ARTURIResource;
import it.uniroma2.art.owlart.model.NodeFilters;
import it.uniroma2.art.owlart.models.DirectReasoning;
import it.uniroma2.art.owlart.models.OWLModel;
import it.uniroma2.art.owlart.models.RDFModel;
import it.uniroma2.art.owlart.models.RDFSModel;
import it.uniroma2.art.owlart.models.SKOSModel;
import it.uniroma2.art.owlart.models.SKOSXLModel;
import it.uniroma2.art.owlart.navigation.ARTNamespaceIterator;
import it.uniroma2.art.owlart.navigation.ARTResourceIterator;
import it.uniroma2.art.owlart.navigation.ARTStatementIterator;
import it.uniroma2.art.owlart.vocabulary.RDFResourceRolesEnum;
import it.uniroma2.art.owlart.vocabulary.RDFTypesEnum;

import java.util.ArrayList;
import java.util.Collection;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class ModelUtilities {

	/**
	 * this method tells if namespace <code>ns</code> is present among the list of namespaces in model
	 * <code>om</code>
	 * 
	 * @param om
	 *            the model whose declared namespaces are inspected
	 * @param ns
	 *            the namespace being looked for
	 * @return true if <code>ns</code> is among the namespaces declared in <code>om</code>
	 * @throws ModelAccessException
	 */
	public static boolean isAvailableNamespace(RDFModel om, String ns) throws ModelAccessException {
		ARTNamespaceIterator nss = om.listNamespaces();
		boolean avail = false;
		try {
			// stop scanning as soon as a match is found
			// NOTE(review): next() is compared to the namespace String via equals; confirm the iterator
			// yields values for which this comparison can succeed
			while (avail == false && nss.streamOpen())
				avail = nss.next().equals(ns);
		} finally {
			// the original implementation leaked the iterator; always release it
			nss.close();
		}
		return avail;
	}

	/**
	 * given namespace <code>namespace</code>, this tries to automatically suggest a prefix for it
	 * 
	 * @param namespace
	 *            the namespace a prefix has to be guessed for
	 * @return the guessed prefix
	 */
	public static String guessPrefix(String namespace) {
		// drop the trailing separator ("/" or "#"), if any
		String candidate = (namespace.endsWith("/") || namespace.endsWith("#"))
				? namespace.substring(0, namespace.length() - 1)
				: namespace;

		// cut away a trailing top-level-domain-like suffix (a dot followed by 2 or 3 word characters,
		// e.g. ".it", ".com")
		if (candidate.matches(".*\\.(\\w{2}|\\w{3})"))
			candidate = candidate.substring(0, candidate.lastIndexOf("."));

		// the prefix is whatever follows the last "." or "/", whichever occurs later
		int cutIndex = Math.max(candidate.lastIndexOf("."), candidate.lastIndexOf("/"));
		return candidate.substring(cutIndex + 1);
	}

	/**
	 * given the baseuri of an ontology, returns the default namespace associated to it: the baseuri itself
	 * when it already ends with a separator ("/" or "#"), otherwise the baseuri with a "#" appended
	 * 
	 * @param baseuri
	 *            the baseuri of the ontology
	 * @return the default namespace for <code>baseuri</code>
	 */
	public static String createDefaultNamespaceFromBaseURI(String baseuri) {
		boolean alreadyTerminated = baseuri.endsWith("/") || baseuri.endsWith("#");
		return alreadyTerminated ? baseuri : baseuri + "#";
	}

	/**
	 * this method helps in deleting individuals and their associated info. the PropertyChainsTree is used to
	 * distinguish which linked nodes in the graph should be deleted together with the main individual <br/>
	 * If an individual reachable through the property chain is however referred (it is object of a triple) by
	 * other individuals, then it is not deleted
	 * 
	 * @param resource
	 *            the individual to be deleted
	 * @param model
	 *            the model the deletion is performed on
	 * @param delPropTree
	 *            the tree of property chains driving the propagation of the deletion
	 * @param graphs
	 *            the graphs the deletion is restricted to
	 * @throws ModelUpdateException
	 */
	public static void deepDeleteIndividual(ARTResource resource, RDFModel model,
			PropertyChainsTree delPropTree, ARTResource... graphs) throws ModelUpdateException {

		// the recursion is sound. The basic assumption is that the first resource (first call) is always to
		// be deleted, while the linked ones are not necessarily deleted (see below), so the first call
		// immediately deletes the resource, while the checks for any subsequent call (if delete or not) are
		// made at the end of the previous call (so this code contains the check for the next invocation of
		// this function)

		// 1) removes all the incoming edges
		model.deleteTriple(NodeFilters.ANY, NodeFilters.ANY, resource, graphs);

		// 2) mark all objects pointed through outgoing properties in the PropertyChainsTree as
		// "to be deleted" before deleting all triples having resource as their subject
		Multimap<ARTResource, ARTURIResource> deletePropagation = HashMultimap.create();
		ARTStatementIterator stit;

		try {
			// instead of cycling on the delPropTree.getChainedProperties to get all the triples of type
			// (resource, delPropTree.getChainedProperties.getNext(), ANY) we simply list the outgoing
			// triples and then, for each of them, we check if the predicate is among the properties. We thus
			// get all the predicates linking resource to any object, and if the property chain tree contains
			// two properties p1 and p2 and there exist triples:
			// resource p1 object
			// resource p2 object
			// then both the pairs <object, p1> and <object, p2> are added to the deletePropagation map
			stit = model.listStatements(resource, NodeFilters.ANY, NodeFilters.ANY, false);
			try {
				while (stit.streamOpen()) {
					ARTStatement st = stit.next();
					ARTURIResource predicate = st.getPredicate();
					ARTNode object = st.getObject();
					// the isResource() check should be superfluous, since the chosen propagation properties
					// should only be object properties or simple properties linking to resources (not
					// literals), but it guards against ill-formed data
					if (delPropTree.hasChained(predicate) && object.isResource())
						deletePropagation.put(object.asResource(), predicate);
				}
			} finally {
				// release the iterator even when iteration fails (the original leaked it on exception)
				stit.close();
			}
		} catch (ModelAccessException e) {
			throw new ModelUpdateException(e);
		}

		// 3) outgoing edges from resource to objects are now deleted (after info from 2 has been collected)
		model.deleteTriple(resource, NodeFilters.ANY, NodeFilters.ANY, graphs);

		// 4) make a recursion over the deletePropagation map
		for (ARTResource object : deletePropagation.keySet()) {
			try {
				// outgoing edges from resource to objects have already been deleted by last operation; if
				// there are further incoming edges to these objects, then they should not be deleted (that
				// is, the object has its own existence in the ontology which is independent from the deleted
				// instance) so here we check if there are other incoming edges and use this check in the
				// following if statement
				stit = model.listStatements(NodeFilters.ANY, NodeFilters.ANY, object, false);
				try {
					// TODO changed this to accommodate possible use in RDFModel (not RDFS), but with the
					// inner check for repository type it is really inefficient. Modify it accordingly
					if ((model instanceof RDFSModel) && !((RDFSModel) model).isClass(object)
							&& !model.isProperty(object) && !stit.streamOpen())
						// so far: checks that new object is not a class or property, and that it has no
						// other incoming connections.
						// NOTE: the original recursion dropped the graphs argument, so propagation was not
						// restricted to the requested graphs; graphs is now propagated
						deepDeleteIndividual(object, model,
								delPropTree.getNextChains(deletePropagation.get(object)), graphs);
				} finally {
					stit.close();
				}
			} catch (ModelAccessException e) {
				throw new ModelUpdateException(e);
			}
		}
	}

	/**
	 * creates the <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a> for an RDF
	 * Resource
	 * 
	 * @param model
	 * @param resource
	 * @param inferred
	 * @param graphs
	 * @return the statements making up the CBD of <code>resource</code>
	 * @throws ModelAccessException
	 * 
	 * @see <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a>
	 */
	public static Collection<ARTStatement> createCBD(RDFModel model, ARTResource resource, boolean inferred,
			ARTResource... graphs) throws ModelAccessException {
		ArrayList<ARTStatement> cbdStatements = new ArrayList<ARTStatement>();
		// the second collection tracks bnodes already expanded, to guarantee termination
		ArrayList<ARTResource> expandedNodes = new ArrayList<ARTResource>();
		createCBD(model, resource, inferred, cbdStatements, expandedNodes, graphs);
		return cbdStatements;
	}

	/**
	 * creates a customized <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a> for an
	 * RDF Resource, where the statements are expanded either according to the original constraint (i.e. the
	 * objects of triples are bnodes) or because the predicate matches a particular property chain
	 * 
	 * @param model
	 * @param resource
	 * @param inferred
	 * @param propChainTree
	 * @param graphs
	 * @return the statements making up the customized CBD of <code>resource</code>
	 * @throws ModelAccessException
	 */
	public static Collection<ARTStatement> createCustomCBD(RDFModel model, ARTResource resource,
			boolean inferred, PropertyChainsTree propChainTree, ARTResource... graphs)
			throws ModelAccessException {
		ArrayList<ARTStatement> cbdStatements = new ArrayList<ARTStatement>();
		// the second collection tracks nodes already expanded, to guarantee termination
		ArrayList<ARTResource> expandedNodes = new ArrayList<ARTResource>();
		createCustomCBD(model, resource, inferred, propChainTree, cbdStatements, expandedNodes, graphs);
		return cbdStatements;
	}

	/**
	 * creates the <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a> for an RDF
	 * Resource
	 * 
	 * @param model
	 * @param resource
	 * @param inferred
	 * @param statements
	 *            statements for the CBD are stored here
	 * @param marked
	 *            this collection contains all the bnodes which have been already expanded in the CBD
	 * @param graphs
	 * @throws ModelAccessException
	 * 
	 * @see <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a>
	 */
	private static void createCBD(RDFModel model, ARTResource resource, boolean inferred,
			Collection<ARTStatement> statements, Collection<ARTResource> marked, ARTResource... graphs)
			throws ModelAccessException {
		ARTStatementIterator stats = model.listStatements(resource, NodeFilters.ANY, NodeFilters.ANY,
				inferred, graphs);
		try {
			while (stats.streamOpen()) {
				ARTStatement stat = stats.getNext();
				statements.add(stat);
				ARTNode object = stat.getObject();
				// expand only bnodes not met before: marking them up front guarantees termination on
				// cyclic bnode structures
				if (object.isBlank() && !marked.contains(object)) {
					ARTBNode objectBNode = object.asBNode();
					marked.add(objectBNode);
					createCBD(model, objectBNode, inferred, statements, marked, graphs);
				}
			}
		} finally {
			// release the iterator even when iteration fails (the original leaked it on exception)
			stats.close();
		}
	}

// TODO there's one hole in this algorithm. It anticipates, by using the two collections (actually a
	// multimap and a collection), the processing of resource bound through chained properties with respect to
	// normal bnode expansion.
	// However, this is local to the processing of a single subject.
	// What happens if we have something like that?
	// x p1 y p2 z p3 w
	// x p4 z
	//
	// and p1, p2 and p3 are in the property chain p1-->p2-->p3 but not p4?
	// actually, z is processed through <x, p4, z> before its role in the chain p1-->p2-->p3 is discovered, so
	// its contribution to expand through p3 is not exploited since it has been expanded through p4 and put in
	// the marked resources

	// proposed solution: very easy: just go along ONLY with the property chained CBD, and keep the global
	// container for to-be-visited bnodes. Once everything has been expanded recursively, start with standard
	// CBD by passing to it the set of already analyzed nodes. Keep care of things which would be left over by
	// the only-chained-tree process

	/**
	 * creates a customized <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a> for an
	 * RDF Resource, where the statements are expanded either according to the original constraint (i.e. the
	 * objects of triples are bnodes) or because the predicate matches a particular property chain
	 * 
	 * 
	 * @param model
	 * @param resource
	 * @param inferred
	 * @param propChainTree
	 * @param statements
	 *            statements for the CBD are stored here
	 * @param marked
	 *            this collection contains all the nodes which have been already expanded in the CBD
	 * @param graphs
	 * @throws ModelAccessException
	 * 
	 * @see <a href="http://www.w3.org/Submission/CBD/">Concise Bounded Description</a>
	 */
	private static void createCustomCBD(RDFModel model, ARTResource resource, boolean inferred,
			PropertyChainsTree propChainTree, Collection<ARTStatement> statements,
			Collection<ARTResource> marked, ARTResource... graphs) throws ModelAccessException {
		// a multimap associating different predicates (values) linking same objects (keys) to <resource>
		Multimap<ARTResource, ARTURIResource> matchedChainedProperties = HashMultimap.create();
		// the set of bnode objects which need to be further expanded. Their expansion is deferred until
		// after the expansion of the nodes (be them URIs or BNodes) matched through the provided property
		// chain tree, so that the property chain is always brought ahead if matching on the available
		// subgraphs
		ArrayList<ARTResource> toBeExpandedBNodes = new ArrayList<ARTResource>();

		ARTStatementIterator stats = model.listStatements(resource, NodeFilters.ANY, NodeFilters.ANY,
				inferred, graphs);
		try {
			while (stats.streamOpen()) {
				ARTStatement stat = stats.getNext();
				statements.add(stat);

				ARTURIResource predicate = stat.getPredicate();
				ARTNode object = stat.getObject();

				// objects reached through a chained property are candidates for chained expansion
				if (propChainTree.hasChained(predicate) && object.isResource()) {
					matchedChainedProperties.put(object.asResource(), predicate);
				}

				if (object.isBlank() && !marked.contains(object)) {
					toBeExpandedBNodes.add(object.asBNode());
				}
			}
		} finally {
			// release the iterator even when iteration fails (the original leaked it on exception)
			stats.close();
		}

		// first make recursion over the unfolded ChainTree and the found objects
		for (ARTResource object : matchedChainedProperties.keySet()) {
			if (!marked.contains(object)) {
				marked.add(object);
				createCustomCBD(model, object, inferred,
						propChainTree.getNextChains(matchedChainedProperties.get(object)), statements,
						marked, graphs);
			}
		}

		// then expand the BNodes (some of which may already have been expanded by the chained recursion
		// above, hence the re-check against marked)
		for (ARTResource object : toBeExpandedBNodes) {
			if (!marked.contains(object)) {
				marked.add(object);
				createCBD(model, object, inferred, statements, marked, graphs);
			}
		}
	}

	/**
	 * returns the number of resources which are instances of <code>cls</code>; if <code>direct</code> is
	 * true, only <em>direct</em> instances are counted
	 * 
	 * @param model
	 * @param cls
	 * @param direct
	 *            if true, only direct instances of <code>cls</code> are counted
	 * @param graphs
	 * @return the number of (direct) instances of <code>cls</code>
	 * @throws ModelAccessException
	 */
	public static int getNumberOfClassInstances(DirectReasoning model, ARTResource cls, boolean direct,
			ARTResource... graphs) throws ModelAccessException {
		ARTResourceIterator it;
		if (direct)
			it = model.listDirectInstances(cls, graphs);
		else
			it = model.listInstances(cls, true, graphs);
		int instCounter = 0;
		try {
			// consume the iterator just to count its elements
			while (it.streamOpen()) {
				instCounter++;
				it.getNext();
			}
		} finally {
			// release the iterator even when iteration fails (the original leaked it on exception)
			it.close();
		}
		return instCounter;
	}

	/**
	 * this method checks that there is at least a triple referencing the given named resource
	 * <code>res</code> <br/>
	 * seeAlso: {@link RDFModel#retrieveURIResource(String, ARTResource...)} and
	 * {@link RDFModel#existsResource(ARTResource, ARTResource...)}
	 * 
	 * 
	 * @param model
	 * @param res
	 * @return true if <code>res</code> appears in at least one triple of <code>model</code>
	 * @throws ModelAccessException
	 * @deprecated use {@link RDFModel#existsResource(ARTResource, ARTResource...)} instead
	 */
	@Deprecated
	public static boolean checkExistingResource(RDFModel model, ARTResource res) throws ModelAccessException {
		// subject position
		if (model.hasTriple(res, NodeFilters.ANY, NodeFilters.ANY, true))
			return true;
		// object position
		if (model.hasTriple(NodeFilters.ANY, NodeFilters.ANY, res, true))
			return true;
		// predicate position (only URI resources can be predicates)
		if (res.isURIResource())
			if (model.hasTriple(NodeFilters.ANY, res.asURIResource(), NodeFilters.ANY, true))
				return true;

		return false;
	}

	/**
	 * this gets the role of the resource taken from the {@link RDFResourceRolesEnum} enumerated class.<br/>
	 * This method distinguishes between
	 * <ul>
	 * <li>concept, conceptScheme and xLabel (skos/skosxl)</li>
	 * <li>dataRange</li>
	 * <li>cls</li>
	 * <li>objectProperty, datatypeProperty, annotationProperty, ontologyProperty and property (delegated to
	 * {@link #getPropertyRole(ARTURIResource, RDFModel)})</li>
	 * <li>ontology</li>
	 * <li>individual (the fallback when no other role applies)</li>
	 * </ul>
	 * <br/>
	 * 
	 * @param resource
	 *            the resource whose role has to be assessed
	 * @param ontModel
	 *            the model the checks are performed against
	 * @return the role of <code>resource</code>
	 * @throws ModelAccessException
	 */
	public static RDFResourceRolesEnum getResourceRole(ARTResource resource, RDFModel ontModel)
			throws ModelAccessException {

		// first does the checks on the SKOSModel, then if the model is a SKOS one but the checks fail, it
		// returns the OWLModel from the SKOS one
		if (ontModel instanceof SKOSModel) {
			// SKOS concepts, schemes and xlabels are all named resources, so bnodes skip this section
			if (resource.isURIResource()) {
				ARTURIResource uriValue = resource.asURIResource();
				if (((SKOSModel) ontModel).isConcept(uriValue))
					return RDFResourceRolesEnum.concept;

				if (((SKOSModel) ontModel).isSKOSConceptScheme(uriValue)) {
					return RDFResourceRolesEnum.conceptScheme;
				}

				// xLabel only makes sense for SKOS-XL models
				if (ontModel instanceof SKOSXLModel) {
					if (((SKOSXLModel) ontModel).isXLabel(uriValue))
						return RDFResourceRolesEnum.xLabel;
				}
			}
			// this is necessary for the next steps where an OWL Model is necessary, and de facto SKOS
			// includes owl though the java interface keeps the things separate
			ontModel = ((SKOSModel) ontModel).getOWLModel();
		}

		// PAY ATTENTION! YOU MAY BE TEMPTED TO BETTER RENDER THIS CODE BY FACTORIZING WITH RESPECT TO THE
		// MODEL TYPE, HOWEVER, THIS CODE TAKES INTO ACCOUNT THE AVERAGE STATISTICS OF VARIOUS ROLES, SO
		// A SAME MODEL TYPE IS TAKEN INTO CONSIDERATION ACROSS DIFFERENT POINTS OF THE CODE

		if ((ontModel instanceof OWLModel) && ((OWLModel) ontModel).isDataRange(resource))
			return RDFResourceRolesEnum.dataRange;

		if (ontModel instanceof RDFSModel) {
			if (((RDFSModel) ontModel).isClass(resource))
				return RDFResourceRolesEnum.cls;
		}

		// properties and ontologies are necessarily named resources, so these checks apply only to URIs
		if (resource.isURIResource()) {
			ARTURIResource uriValue = resource.asURIResource();

			if (ontModel.isProperty(uriValue)) {
				return getPropertyRole(uriValue, ontModel);
			}

			if ((ontModel instanceof OWLModel) && ((OWLModel) ontModel).isOntology(uriValue))
				return RDFResourceRolesEnum.ontology;
		}

		// no more specific role applies: fall back to individual
		return RDFResourceRolesEnum.individual;
	}

	/**
	 * returns the most specific property role, taken from {@link RDFResourceRolesEnum}, which can be
	 * assigned to <code>property</code>; when the model is not an OWL one, or no more specific
	 * characterization holds, the generic <code>property</code> role is returned
	 * 
	 * @param property
	 *            the property whose role has to be assessed
	 * @param ontModel
	 *            the model the checks are performed against
	 * @return the role of <code>property</code>
	 * @throws ModelAccessException
	 */
	public static RDFResourceRolesEnum getPropertyRole(ARTURIResource property, RDFModel ontModel)
			throws ModelAccessException {
		// only OWL models can characterize properties beyond the generic role
		if (!(ontModel instanceof OWLModel))
			return RDFResourceRolesEnum.property;

		OWLModel owlModel = (OWLModel) ontModel;
		if (owlModel.isObjectProperty(property))
			return RDFResourceRolesEnum.objectProperty;
		if (owlModel.isDatatypeProperty(property))
			return RDFResourceRolesEnum.datatypeProperty;
		if (owlModel.isAnnotationProperty(property))
			return RDFResourceRolesEnum.annotationProperty;
		if (owlModel.isOntologyProperty(property))
			return RDFResourceRolesEnum.ontologyProperty;
		return RDFResourceRolesEnum.property;
	}

}
