package de.unikoblenz.west.csxpoi.server;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.openrdf.model.Literal;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.BooleanQuery;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.http.HTTPRepository;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.openrdf.sail.memory.MemoryStore;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;

/**
 * Imports categories from the LinkedGeoData vocabulary.
 */
public class LinkedGeoDataImport extends HttpServlet {

	private static final long serialVersionUID = 1L;

	/**
	 * the virtual user credited as issuer of the imported contributions
	 */
	private static final String LGD_IMPORT_USER = Constants.NS_USER
			+ "linkedGeoDataImporter";

	/**
	 * Handles GET requests: downloads the LinkedGeoData vocabulary into a
	 * local in-memory store, selects its category classes (and their
	 * superclasses), and imports every category that does not yet exist into
	 * the csxPOI repository, echoing each added triple to the response as N3.
	 *
	 * Fixes over the previous revision: the csxPOI repository is created and
	 * initialized ONCE instead of per result row, both repositories are shut
	 * down, and the repository connection is no longer closed behind the
	 * caller's back by {@link #existsCategory(URI, RepositoryConnection)}.
	 */
	protected void doGet(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {

		// TODO: as standalone instead of servlet?

		PrintWriter writer = ServletHelper.utf8Writer(response);
		RdfWriter.setN3ContentType(response);
		ServletHelper.disableCaching(response);

		// Temporary in-memory store holding the LinkedGeoData vocabulary.
		Repository lgdRepository = new SailRepository(new MemoryStore());
		try {
			lgdRepository.initialize();
			RepositoryConnection lgdConnection = lgdRepository.getConnection();
			try {
				// Load the remote vocabulary into the local store.
				URL lgdVocUrl = new URL(Constants.LINKED_GEO_DATA_VOCABULARY);
				lgdConnection.add(lgdVocUrl, lgdVocUrl.toString(),
						RDFFormat.NTRIPLES);

				TupleQuery lgdQuery = lgdConnection.prepareTupleQuery(
						QueryLanguage.SPARQL, buildCategoryQuery());
				TupleQueryResult lgdResult = lgdQuery.evaluate();
				try {
					// One repository + connection for the whole import run
					// (previously a fresh HTTPRepository was created for
					// every single result row and never shut down).
					Repository csxPoiRepository = new HTTPRepository(
							Constants.SESAME_SERVER, Constants.REPOSITORY_ID);
					csxPoiRepository.initialize();
					try {
						RepositoryConnection csxPoiConnection = csxPoiRepository
								.getConnection();
						try {
							importCategories(lgdResult, csxPoiConnection,
									writer);
						} finally {
							csxPoiConnection.close();
						}
					} finally {
						try {
							csxPoiRepository.shutDown();
						} catch (RepositoryException e) {
							e.printStackTrace();
						}
					}
				} finally {
					lgdResult.close();
				}
			} catch (RDFParseException e) {
				e.printStackTrace();
			} catch (QueryEvaluationException e) {
				e.printStackTrace();
			} catch (MalformedQueryException e) {
				e.printStackTrace();
			} finally {
				lgdConnection.close();
			}
		} catch (RepositoryException e) {
			e.printStackTrace();
		} finally {
			try {
				lgdRepository.shutDown();
			} catch (RepositoryException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Builds the SPARQL query that selects all category classes (with their
	 * superclasses, where present) from the LinkedGeoData vocabulary,
	 * excluding purely technical classes such as lgd-voc:node and lgd-voc:way.
	 *
	 * @return the SPARQL SELECT query string
	 */
	private static String buildCategoryQuery() {
		return "PREFIX lgd-voc: <" + Constants.NS_LGD_VOC + "> "
				+ "PREFIX owl: <" + Constants.NS_OWL + "> "
				+ "PREFIX rdfs: <" + Constants.NS_RDFS + "> "
				+ "SELECT DISTINCT ?category ?superclass WHERE {"
				+ " {"
				+ "  { ?category a owl:Class }"
				+ "  UNION"
				+ "  { ?category rdfs:subClassOf ?superclass ."
				+ "    ?superclass a owl:Class ."
				+ "   FILTER ("
				+ "    ?superclass != ?category &&"
				+ "    ?superclass != rdfs:Resource"
				+ "   )"
				+ "  }"
				+ " } FILTER ("
				+ "  ?category != lgd-voc:node &&"
				+ "  ?category != lgd-voc:way &&"
				+ "  ?category != lgd-voc:relation &&"
				+ "  ?category != lgd-voc:yes &&"
				+ "  ?category != lgd-voc:no"
				+ " )"
				+ "}";
	}

	/**
	 * Imports every category of the query result into the csxPOI repository:
	 * the subclass/label triples, a provenance contribution, the owl:sameAs
	 * interlink to the LinkedGeoData resource, and any DBpedia equivalents.
	 * Rows whose category (or superclass) has no usable name, and categories
	 * that already exist, are skipped.
	 *
	 * @param lgdResult
	 *            result rows binding ?category and optionally ?superclass
	 * @param connection
	 *            an open connection to the csxPOI repository (owned and
	 *            closed by the caller)
	 * @param writer
	 *            the response writer that receives the N3 echo
	 */
	private void importCategories(TupleQueryResult lgdResult,
			RepositoryConnection connection, PrintWriter writer)
			throws QueryEvaluationException, RepositoryException,
			MalformedQueryException, UnsupportedEncodingException {

		ValueFactory valueFactory = connection.getValueFactory();

		// Frequently used vocabulary URIs, created once up front.
		URI rdfsSubClassOfUri = valueFactory.createURI(Constants.NS_RDFS
				+ "subClassOf");
		URI rdfsLabelUri = valueFactory.createURI(Constants.NS_RDFS + "label");
		URI owlSameAsUri = valueFactory.createURI(Constants.NS_OWL + "sameAs");
		URI xsdStringUri = valueFactory.createURI(Constants.NS_XSD + "string");
		URI basePoiUri = valueFactory.createURI(Constants.NS_BASE + "Poi");
		URI rdfTypeUri = valueFactory.createURI(Constants.URI_RDF_TYPE);
		URI categoryCreationUri = valueFactory
				.createURI(Constants.URI_BASE_CATEGORY_CREATION);
		URI affectsCategoryUri = valueFactory
				.createURI(Constants.URI_BASE_AFFECTS_CATEGORY);
		URI issuedAtUri = valueFactory.createURI(Constants.URI_BASE_ISSUED_AT);
		URI dateTimeUri = valueFactory.createURI(Constants.URI_XSD_DATE_TIME);
		URI userUri = valueFactory.createURI(LGD_IMPORT_USER);
		URI issuedByUri = valueFactory.createURI(Constants.URI_BASE_ISSUED_BY);

		while (lgdResult.hasNext()) {
			BindingSet bindingSet = lgdResult.next();

			URI lgdCategory = (URI) bindingSet.getValue("category");
			URI csxPoiSuperclass = null;
			if (bindingSet.hasBinding("superclass")) {
				csxPoiSuperclass = convertNamespace(
						(URI) bindingSet.getValue("superclass"), valueFactory);
			}

			URI csxPoiCategory = convertNamespace(lgdCategory, valueFactory);
			String name = extractName(csxPoiCategory);

			// Skip categories without a usable (alphabetic) name, and
			// categories whose superclass has none.
			if (name == null)
				continue;
			if (csxPoiSuperclass != null
					&& extractName(csxPoiSuperclass) == null)
				continue;

			// Skip categories that are already in the repository.
			if (existsCategory(csxPoiCategory, connection))
				continue;

			Literal dateTimeLiteral = valueFactory.createLiteral(
					OntologyHelper.generateDateTimeString(), dateTimeUri);
			Literal nameLiteral = valueFactory.createLiteral(name,
					xsdStringUri);

			// Add new category:
			addAndEcho(connection, writer, csxPoiCategory, rdfsSubClassOfUri,
					basePoiUri);
			addAndEcho(connection, writer, csxPoiCategory, rdfsLabelUri,
					nameLiteral);

			// Record the creation as a contribution of the import user:
			URI creationContributionUri = valueFactory
					.createURI(OntologyHelper
							.generateContributionUri(connection));
			addAndEcho(connection, writer, creationContributionUri,
					rdfTypeUri, categoryCreationUri);
			addAndEcho(connection, writer, creationContributionUri,
					affectsCategoryUri, csxPoiCategory);
			addAndEcho(connection, writer, creationContributionUri,
					issuedAtUri, dateTimeLiteral);
			addAndEcho(connection, writer, creationContributionUri,
					issuedByUri, userUri);

			// Add interlink to LinkedGeoData category:
			addAndEcho(connection, writer, csxPoiCategory, owlSameAsUri,
					lgdCategory);
			addAndEcho(connection, writer, lgdCategory, rdfsLabelUri,
					nameLiteral);

			// Add superclass to new category:
			if (csxPoiSuperclass != null) {
				addAndEcho(connection, writer, csxPoiCategory,
						rdfsSubClassOfUri, csxPoiSuperclass);
			}

			// Add DBpedia equivalents to new category:
			for (DbpediaResource dbpediaEquivalent : findDbpediaEquivalents(
					name, valueFactory)) {
				addAndEcho(connection, writer, csxPoiCategory, owlSameAsUri,
						dbpediaEquivalent.uri);
				addAndEcho(connection, writer, dbpediaEquivalent.uri,
						rdfsLabelUri, dbpediaEquivalent.title);
			}

			// Blank line separates the triples of one category in the echo.
			writer.println();
		}
	}

	/**
	 * Adds a statement to the repository and echoes it to the response.
	 */
	private void addAndEcho(RepositoryConnection connection,
			PrintWriter writer, URI subject, URI predicate, URI object)
			throws RepositoryException {
		connection.add(subject, predicate, object);
		RdfWriter.writeNTriple(writer, subject, predicate, object);
	}

	/**
	 * Adds a statement with a literal object to the repository and echoes it
	 * to the response.
	 */
	private void addAndEcho(RepositoryConnection connection,
			PrintWriter writer, URI subject, URI predicate, Literal object)
			throws RepositoryException {
		connection.add(subject, predicate, object);
		RdfWriter.writeNTriple(writer, subject, predicate, object);
	}

	/**
	 * Converts resources from the LinkedGeoData vocabulary namespace to the
	 * csxPOI vocabulary. The local name is lower-cased and '+' separators are
	 * replaced with '_' before re-encoding.
	 * 
	 * @param lgdUri
	 *            the resource URI in the LinkedGeoData vocabulary
	 * @param valueFactory
	 *            a value factory from the repository
	 * @return the corresponding URI of the resource in the csxPOI vocabulary
	 * @throws UnsupportedEncodingException
	 */
	private URI convertNamespace(URI lgdUri, ValueFactory valueFactory)
			throws UnsupportedEncodingException {
		String name = URLDecoder.decode(lgdUri.getLocalName(), "UTF-8")
				.toLowerCase().replace("+", "_").trim();
		return valueFactory.createURI(Constants.NS_VOC
				+ URLEncoder.encode(name, "UTF-8"));
	}

	/**
	 * Extracts the human-readable name from the URI of a category.
	 * 
	 * @param uri
	 *            the URI of the category
	 * @return the name of the category, or null if it does not start with an
	 *         alphabetical character
	 * @throws UnsupportedEncodingException
	 */
	private String extractName(URI uri) throws UnsupportedEncodingException {
		String name = URLDecoder.decode(uri.getLocalName(), "UTF-8").replace(
				"_", " ");
		// Reject names not starting with an alphabetical character.
		if (!name.matches("^[a-zA-Z].*"))
			return null;
		return name;
	}

	/**
	 * Tests whether a category exists in the repository.
	 *
	 * NOTE: this method must NOT close the connection — the caller owns it
	 * and keeps using it afterwards. (A previous revision closed it here,
	 * which broke every subsequent add on the same connection.)
	 * 
	 * @param category
	 *            the category to test
	 * @param connection
	 *            an open repository connection (left open)
	 * @return true if the category exists in the repository
	 * @throws RepositoryException
	 * @throws MalformedQueryException
	 * @throws QueryEvaluationException
	 */
	private boolean existsCategory(URI category, RepositoryConnection connection)
			throws RepositoryException, MalformedQueryException,
			QueryEvaluationException {
		BooleanQuery testQuery = connection.prepareBooleanQuery(
				QueryLanguage.SPARQL, "PREFIX base: <" + Constants.NS_BASE
						+ "> " + "PREFIX rdfs: <" + Constants.NS_RDFS + "> "
						+ "ASK WHERE {" + " <" + category.stringValue()
						+ "> rdfs:subClassOf base:Poi ." + "}");
		return testQuery.evaluate();
	}

	/**
	 * Queries the public DBpedia SPARQL endpoint for resources with a given
	 * name as their title.
	 * 
	 * @param name
	 *            the name to search for
	 * @param valueFactory
	 *            a value factory from the repository
	 * @return a list of distinct DBpedia resources with name as their title;
	 *         empty on any network or parse failure
	 */
	private List<DbpediaResource> findDbpediaEquivalents(String name,
			ValueFactory valueFactory) {

		// Capitalize the first character to match DBpedia's title style:
		String title = name.substring(0, 1).toUpperCase() + name.substring(1);

		List<DbpediaResource> dbpediaEquivalents = new ArrayList<DbpediaResource>();

		try {
			// Escape backslashes and quotes so the title cannot break out of
			// the SPARQL string literal (injection hardening).
			String escapedTitle = title.replace("\\", "\\\\").replace("\"",
					"\\\"");
			String query = "PREFIX rdfs: <" + Constants.NS_RDFS + "> "
					+ "SELECT DISTINCT ?resource WHERE"
					+ " { ?resource rdfs:label \"" + escapedTitle
					+ "\"@en . }";
			URL url = new URL("http://dbpedia.org/sparql"
					+ "?default-graph-uri="
					+ URLEncoder.encode("http://dbpedia.org", "UTF-8")
					+ "&query="
					+ URLEncoder.encode(query, "UTF-8")
					+ "&format="
					+ URLEncoder.encode("application/sparql-results+xml",
							"UTF-8") + "&timeout=3000");

			SAXParserFactory spf = SAXParserFactory.newInstance();
			// Harden against XXE: a SPARQL results document needs no DTDs
			// or external entities.
			spf.setFeature(
					"http://apache.org/xml/features/disallow-doctype-decl",
					true);
			spf.setFeature(
					"http://xml.org/sax/features/external-general-entities",
					false);
			spf.setFeature(
					"http://xml.org/sax/features/external-parameter-entities",
					false);
			SAXParser sp = spf.newSAXParser();
			XMLReader xr = sp.getXMLReader();
			DbpediaSaxHandler dbpediaSaxHandler = new DbpediaSaxHandler();
			xr.setContentHandler(dbpediaSaxHandler);

			// Close the response stream (previously leaked).
			java.io.InputStream responseStream = url.openStream();
			try {
				xr.parse(new InputSource(responseStream));
			} finally {
				responseStream.close();
			}

			URI xsdStringUri = valueFactory.createURI(Constants.NS_XSD
					+ "string");
			// Deduplicate by URI string. (The previous revision called
			// List<DbpediaResource>.contains(URI), which could never match.)
			Set<String> seenUris = new HashSet<String>();
			for (String equivalentString : dbpediaSaxHandler.getUris()) {
				if (!seenUris.add(equivalentString))
					continue;
				DbpediaResource resource = new DbpediaResource();
				resource.uri = valueFactory.createURI(equivalentString);
				resource.title = valueFactory.createLiteral(title,
						xsdStringUri);
				dbpediaEquivalents.add(resource);
			}

		} catch (MalformedURLException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (ParserConfigurationException e) {
			e.printStackTrace();
		} catch (SAXException e) {
			e.printStackTrace();
		}

		return dbpediaEquivalents;
	}

	/**
	 * Parses the XML response from the DBpedia SPARQL endpoint and collects
	 * the URIs bound to the ?resource variable. Static nested class: it does
	 * not need a reference to the enclosing servlet.
	 */
	private static class DbpediaSaxHandler extends DefaultHandler {

		/**
		 * the URIs contained in the response
		 */
		private List<String> mUris = new ArrayList<String>();

		/**
		 * accumulates the character data of the current element
		 */
		private StringBuilder mCurrentCharacters = new StringBuilder();

		/**
		 * the name of the binding currently being parsed, or null
		 */
		private String mCurrentBinding = null;

		/**
		 * the constant for the binding tag
		 */
		private static final String BINDING_TAG = "binding";

		/**
		 * the constant for the URI tag
		 */
		private static final String URI_TAG = "uri";

		/**
		 * the constant for the name attribute
		 */
		private static final String NAME_ATTRIBUTE = "name";

		/**
		 * Handles opening tags.
		 */
		@Override
		public void startElement(String uri, String localName, String qName,
				Attributes attributes) throws SAXException {
			// Discard text accumulated before this element started.
			mCurrentCharacters.setLength(0);
			if (qName.equals(BINDING_TAG)) {
				mCurrentBinding = attributes.getValue(NAME_ATTRIBUTE);
			}
		}

		/**
		 * Handles closing tags.
		 */
		@Override
		public void endElement(String uri, String localName, String qName)
				throws SAXException {
			if ("resource".equals(mCurrentBinding) && qName.equals(URI_TAG)) {
				mUris.add(mCurrentCharacters.toString());
			}
			if (qName.equals(BINDING_TAG)) {
				mCurrentBinding = null;
			}
		}

		/**
		 * Handles character data. SAX may deliver an element's text in
		 * several chunks, so the chunks are APPENDED (the previous revision
		 * kept only the last chunk, truncating long URIs).
		 */
		@Override
		public void characters(char[] ch, int start, int length)
				throws SAXException {
			mCurrentCharacters.append(ch, start, length);
		}

		/**
		 * Gets the URIs contained in the response.
		 * 
		 * @return the URIs contained in the response
		 */
		public List<String> getUris() {
			return mUris;
		}
	}

	/**
	 * Represents a DBpedia resource (URI plus display title). Static nested
	 * class: it does not need a reference to the enclosing servlet.
	 */
	private static class DbpediaResource {

		/**
		 * the URI of the DBpedia resource
		 */
		public URI uri = null;

		/**
		 * the title of the DBpedia resource
		 */
		public Literal title = null;
	}
}
