package de.unikoblenz.west.csxpoi.server;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.openrdf.model.Literal;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;

//import com.sun.org.apache.bcel.internal.generic.StoreInstruction;

/**
 * Detects and merges duplicate POIs by adding equivalences to the members of
 * found clusters pointing to the medoid.
 */
public class DuplicatePoiFinder {

	/** similarity type selector for plain POIs (see SimilarityFactory) */
	private static final int POI = 1;

	/** similarity type selector for events (see SimilarityFactory) */
	private static final int EVENT = 2;

	/** log file the clustering results are written to */
	private static final String LOG_FILE = "revisionengine.log";

	/**
	 * the virtual user for the contributions
	 */
	private static final String DUPLICATE_POI_USER = Constants.NS_USER
			+ "duplicatePoiFinder";

	/**
	 * starts the detection and merging of duplicate POIs
	 * 
	 * @param connection
	 *            a repository connection
	 * @param writer
	 *            a writer for the logging output
	 * @throws RepositoryException
	 * @throws MalformedQueryException
	 * @throws QueryEvaluationException
	 * @throws SimilarityException
	 */
	public static void execute(RepositoryConnection connection,
			PrintWriter writer) throws RepositoryException,
			MalformedQueryException, QueryEvaluationException,
			SimilarityException {

		List<PoiWrapper> results = retrieveAllPois(connection);
		List<PoiWrapper> pois = new ArrayList<PoiWrapper>();
		List<PoiWrapper> events = new ArrayList<PoiWrapper>();

		// Split between POIs and events: an entry carrying both a start and
		// an end date is an event, everything else is a plain POI. (The
		// previous code compared strings with "!=", i.e. by reference, and
		// could place an entry in both lists.)
		for (PoiWrapper poi : results) {
			if (hasDate(poi.getStartDate()) && hasDate(poi.getEndDate()))
				events.add(poi);
			else
				pois.add(poi);
		}

		// Remove the virtual user's old contributions exactly once, BEFORE
		// anything new is stored. Doing this inside storeClusters() made the
		// event pass delete the contributions the POI pass had just written.
		removePreviousContributions(connection);

		// process POIs: cluster once and reuse the result for storing and
		// logging (dbscan used to run twice per category)
		Similarity poiSimilarity = SimilarityFactory.getSimilarity(POI);
		Set<Cluster> poiClusters = Clustering.dbscan(pois, connection,
				poiSimilarity);
		storeClusters(poiClusters, poiSimilarity, connection, writer);
		writeClusters(poiClusters, poiSimilarity, false);

		// process events; append so the POI section of the log is preserved
		Similarity eventSimilarity = SimilarityFactory.getSimilarity(EVENT);
		Set<Cluster> eventClusters = Clustering.dbscan(events, connection,
				eventSimilarity);
		storeClusters(eventClusters, eventSimilarity, connection, writer);
		writeClusters(eventClusters, eventSimilarity, true);
	}

	/**
	 * checks whether a date string denotes a present date
	 * 
	 * @param date
	 *            a date string of a POI, may be null
	 * @return true if the string is neither null nor empty
	 */
	private static boolean hasDate(String date) {
		return date != null && date.length() > 0;
	}

	/**
	 * Writes a clustering information to the log file and echoes each member
	 * line to stdout.
	 * 
	 * @param clusters
	 *            the clusters to log
	 * @param similarity
	 *            the similarity used to determine each cluster's medoid
	 * @param append
	 *            true to append to an existing log file, false to overwrite
	 */
	private static void writeClusters(Set<Cluster> clusters,
			Similarity similarity, boolean append) {
		BufferedWriter out = null;
		try {
			out = new BufferedWriter(new FileWriter(new File(LOG_FILE), append));
			for (Cluster cluster : clusters) {

				out.write("POIs for Medoid "
						+ cluster.getMedoid(similarity).getName() + ": \n");
				for (PoiWrapper poi : cluster.getMembers()) {
					String line = "Name: " + poi.getName() + ", Location: "
							+ poi.getLatitude() + "-" + poi.getLongitude()
							+ "\n";
					out.write(line);
					System.out.println(line);
				}

			}
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// close in finally so the file handle is not leaked when an
			// exception occurs while writing
			if (out != null) {
				try {
					out.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Stores a clustering information in the repository: for every cluster
	 * member except the medoid itself a contribution is added that marks the
	 * member as owl:sameAs the medoid.
	 * 
	 * @param clusters
	 *            the clusters to store
	 * @param similarity
	 *            the similarity used to determine each cluster's medoid
	 * @param connection
	 *            a repository connection
	 * @param writer
	 *            a writer for the N-Triple logging output
	 * @throws RepositoryException
	 * @throws MalformedQueryException
	 * @throws QueryEvaluationException
	 * @throws SimilarityException
	 */
	private static void storeClusters(Set<Cluster> clusters,
			Similarity similarity, RepositoryConnection connection,
			PrintWriter writer) throws RepositoryException,
			MalformedQueryException, QueryEvaluationException,
			SimilarityException {

		ValueFactory valueFactory = connection.getValueFactory();

		URI rdfTypeUri = valueFactory.createURI(Constants.URI_RDF_TYPE);
		URI poiModificationUri = valueFactory
				.createURI(Constants.URI_BASE_POI_MODIFICATION);
		URI affectsPoiUri = valueFactory
				.createURI(Constants.URI_BASE_AFFECTS_POI);
		URI issuedAtUri = valueFactory.createURI(Constants.URI_BASE_ISSUED_AT);
		URI dateTimeUri = valueFactory.createURI(Constants.URI_XSD_DATE_TIME);
		URI userUri = valueFactory.createURI(DUPLICATE_POI_USER);
		URI issuedByUri = valueFactory.createURI(Constants.URI_BASE_ISSUED_BY);
		URI addsPoiPropertyUri = valueFactory
				.createURI(Constants.URI_BASE_ADDS_POI_PROPERTY);
		URI owlSameAsUri = valueFactory.createURI(Constants.URI_OWL_SAME_AS);
		URI poiPropertyValueUri = valueFactory
				.createURI(Constants.URI_BASE_POI_PROPERTY_VALUE);

		for (Cluster cluster : clusters) {

			PoiWrapper medoid = cluster.getMedoid(similarity);
			URI medoidUri = valueFactory.createURI(medoid.getUri());

			for (PoiWrapper poi : cluster) {

				// the medoid does not get an equivalence to itself
				if (poi.equals(medoid))
					continue;

				String contributionId = OntologyHelper
						.generateContributionUri(connection);

				URI poiUri = valueFactory.createURI(poi.getUri());
				URI contributionUri = valueFactory.createURI(contributionId);
				Literal dateTimeLiteral = valueFactory.createLiteral(
						OntologyHelper.generateDateTimeString(), dateTimeUri);

				// every statement is both added to the repository and logged
				// as an N-Triple
				connection.add(contributionUri, rdfTypeUri, poiModificationUri);
				RdfWriter.writeNTriple(writer, contributionUri, rdfTypeUri,
						poiModificationUri);

				connection.add(contributionUri, affectsPoiUri, poiUri);
				RdfWriter.writeNTriple(writer, contributionUri, affectsPoiUri,
						poiUri);

				connection.add(contributionUri, issuedAtUri, dateTimeLiteral);
				RdfWriter.writeNTriple(writer, contributionUri, issuedAtUri,
						dateTimeLiteral);

				connection.add(contributionUri, issuedByUri, userUri);
				RdfWriter.writeNTriple(writer, contributionUri, issuedByUri,
						userUri);

				connection.add(contributionUri, addsPoiPropertyUri,
						owlSameAsUri);
				RdfWriter.writeNTriple(writer, contributionUri,
						addsPoiPropertyUri, owlSameAsUri);

				connection.add(contributionUri, poiPropertyValueUri, medoidUri);
				RdfWriter.writeNTriple(writer, contributionUri,
						poiPropertyValueUri, medoidUri);

				writer.println();
			}
		}

	}

	/**
	 * removes all previous contributions by the same virtual user
	 * 
	 * @param connection
	 *            a repository connection
	 * @throws RepositoryException
	 * @throws MalformedQueryException
	 * @throws QueryEvaluationException
	 */
	private static void removePreviousContributions(
			RepositoryConnection connection) throws RepositoryException,
			MalformedQueryException, QueryEvaluationException {

		TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL,
				"PREFIX base: <" + Constants.NS_BASE + "> " + "PREFIX geo: <"
						+ Constants.NS_GEO + "> " + "PREFIX rdfs: <"
						+ Constants.NS_RDFS + "> "
						+ "SELECT ?contribution WHERE {"
						+ " ?contribution a base:Contribution ;"
						+ "  base:issuedBy ?user ." + " FILTER (?user = <"
						+ DUPLICATE_POI_USER + ">)" + "}");
		TupleQueryResult result = query.evaluate();
		try {
			while (result.hasNext()) {
				BindingSet bindingSet = result.next();
				URI contribution = (URI) bindingSet.getValue("contribution");
				// remove every statement that has the contribution as subject
				connection.remove(contribution, null, null);
			}
		} finally {
			result.close();
		}
	}

	/**
	 * Retrieves all POIs in the repository
	 * 
	 * @param connection
	 *            a repository connection
	 * @return a list of all POIs in the repository
	 * @throws RepositoryException
	 * @throws MalformedQueryException
	 * @throws QueryEvaluationException
	 */
	private static List<PoiWrapper> retrieveAllPois(
			RepositoryConnection connection) throws RepositoryException,
			MalformedQueryException, QueryEvaluationException {

		List<PoiWrapper> pois = new ArrayList<PoiWrapper>();

		TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL,
				"PREFIX base: <" + Constants.NS_BASE + "> " + "PREFIX rdfs: <"
						+ Constants.NS_RDFS + "> " + "SELECT ?poi WHERE {"
						+ " ?poi a base:Poi ." + "}");
		TupleQueryResult result = query.evaluate();
		try {
			while (result.hasNext()) {
				BindingSet bindingSet = result.next();
				// createFromUri may return null for POIs that cannot be
				// resolved; those are skipped
				PoiWrapper poi = PoiWrapper.createFromUri(
						bindingSet.getValue("poi").stringValue(), connection);
				if (poi != null)
					pois.add(poi);
			}
		} finally {
			result.close();
		}

		return pois;
	}

}
