package sk.datalan.similarity;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.openrdf.model.Statement;
import org.openrdf.model.impl.StatementImpl;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.GraphQuery;
import org.openrdf.query.GraphQueryResult;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;

import sk.datalan.core.DatabaseManager;
import sk.datalan.core.Utils;
import sk.datalan.similarity.predicates.Predicate;

public final class Process implements Runnable {

	/** Namespace of the realestate ontology, shared by all queries and created triples. */
	private static final String REALESTATE_NS = "http://www.datalan.sk/semweb/ontology/realestate/1.0/realestate.owl#";

	/**
	 * Similarity-index cutoff: pairs scoring strictly below this value are treated
	 * as the same real-world individual (threshold chosen empirically by the author).
	 */
	private static final double SIMILARITY_THRESHOLD = 1.1;

	/** Individuals of the processed rdf:type, keyed by individual URI. */
	private Map<String, Individual> individuals = new HashMap<String, Individual>( 100000 );

	/** Local name of the rdf:type being processed. */
	private String realestateType;

	/**
	 * @param realestate local name of the rdf:type of RealEstate to process,
	 *            e.g. OneBedroomApartment, House etc.
	 */
	public Process( String realestate ) {
		this.realestateType = realestate;
	}

	@Override
	public void run() {
		hashImages( this.realestateType ); // calculate an image hash for each image and store it in the repository
		process( this.realestateType );
		compareAndClean();
	}

	/**
	 * Computes an image hash for every image of every individual of the given type
	 * and inserts the hashes into the repository as {@code imageHash} triples.
	 * All statements are collected first and committed in a single transaction.
	 *
	 * @param realestateType local name of the rdf:type whose images are hashed
	 */
	private void hashImages( String realestateType ) {
		RepositoryConnection con = null;
		TupleQueryResult result = null;
		List<Statement> statements = new ArrayList<Statement>( 150000 );
		try {
			System.out.println( "Zaciatok hashovania imageov pre process " + realestateType );
			con = DatabaseManager.getInstance().getRepository().getConnection();
			String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>  " + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
					+ "SELECT ?individual ?image WHERE { ?individual rdf:type <" + REALESTATE_NS + realestateType
					+ "> . ?individual <" + REALESTATE_NS + "image> ?image }";
			TupleQuery q = con.prepareTupleQuery( QueryLanguage.SPARQL, query );
			q.setIncludeInferred( true );
			result = q.evaluate();

			int rows = 0;

			// batch all inserts into one explicit transaction
			con.setAutoCommit( false );

			long time = System.currentTimeMillis();
			while ( result.hasNext() ) {
				BindingSet s = result.next();
				File f = new File( s.getBinding( "image" ).getValue().stringValue() );
				String imageHash = ImageSimilarityProcessor.getInstance().calculateImageHash( f );
				statements.add( new StatementImpl( new URIImpl( s.getBinding( "individual" ).getValue().stringValue() ),
						new URIImpl( REALESTATE_NS + "imageHash" ),
						ValueFactoryImpl.getInstance().createLiteral( imageHash ) ) );
				rows++;
			}
			Utils.log( realestateType + " - pocet vytvorenych image hashov:" + rows + " za cas : " + ( System.currentTimeMillis() - time ) + "ms" );
			Utils.log( realestateType + " - Zaciatok insertovania hashov do DB" );
			time = System.currentTimeMillis();

			con.add( statements );
			con.commit();

			DatabaseManager.getInstance().flush();

			Utils.log( realestateType + " - Cas vlozenia " + rows + " hashov do DB: " + ( System.currentTimeMillis() - time ) + "ms" );

		} catch ( RepositoryException e ) {
			e.printStackTrace();
		} catch ( NumberFormatException e ) {
			e.printStackTrace();
		} catch ( QueryEvaluationException e ) {
			e.printStackTrace();
		} catch ( MalformedQueryException e ) {
			e.printStackTrace();
		} finally {
			// BUGFIX: close the result and the connection even when an exception
			// occurred; the original NPE'd here when getConnection() itself failed
			// and never closed the query result on error paths.
			if ( result != null ) {
				try {
					result.close();
				} catch ( QueryEvaluationException e ) {
					e.printStackTrace();
				}
			}
			if ( con != null ) {
				try {
					con.close();
				} catch ( RepositoryException e ) {
					e.printStackTrace();
				}
			}
		}
	}

	/*
	 * Compares the individuals pairwise and determines which of them denote the
	 * same real-world entity, then collects the triples that became redundant.
	 */
	private void compareAndClean() {
		long time = System.currentTimeMillis();
		// slave URI -> master URI for every detected duplicate pair
		HashMap<String, String> matchingCandidates = new HashMap<String, String>( 1000000 );
		List<String> l = new ArrayList<String>( 1000000 );
		int iteration = 0;
		double sim = 0;
		for ( String key : individuals.keySet() ) {
			Individual individual = individuals.get( key );

			// already classified as a duplicate of some other individual
			if ( matchingCandidates.containsKey( key ) ) {
				continue;
			}

			for ( String c : individual.getCandidateKeys() ) {
				Individual candidate = individuals.get( c );
				if ( candidate == null || c.equals( key ) ) {
					continue;
				}
				sim = individual.getSimilarityIndex( candidate );
				if ( sim < SIMILARITY_THRESHOLD ) { // below the threshold the two individuals are considered identical
					matchingCandidates.put( candidate.getId(), individual.getId() );
				} else {
					candidate.reset();
				}

				// never compare the same pair again from the candidate's side
				candidate.getCandidateKeys().remove( individual.getId() );
				iteration++;
			}
		}
		System.out.println( "Matching candidates : " + matchingCandidates.size() );

		for ( String k : matchingCandidates.keySet() ) {
			l.addAll( deleteRedundantTriples( individuals.get( k ), individuals.get( matchingCandidates.get( k ) ) ) );
		}
		System.out.println( "Pocet porovnani individui: " + iteration + "\nCelkovy pocet porovnani: "+Individual.count+"\nPocet tripletov na vymazanie : " + l.size() + "\nCelkovy cas: "
				+ ( System.currentTimeMillis() - time ) + "ms" );

	}

	/*
	 * Unfinished method that will delete the redundant triples created when
	 * merging individuals via owl:sameAs. The main logic is implemented; only the
	 * actual deletion against the database is still missing.
	 *
	 * Returns "subject predicate value" strings for every matched predicate of
	 * the slave individual.
	 */
	private List<String> deleteRedundantTriples( Individual master, Individual slave ) {
		List<String> deleteTriples = new ArrayList<String>( 30 );
		// StringBuilder suffices: this method is only ever used by a single thread
		StringBuilder b = new StringBuilder( 500 );
		for ( String key : slave.getPredicates().keySet() ) {
			for ( Predicate p : slave.getPredicates().get( key ) ) {
				if ( p.hasMatch() ) {
					b.append( slave.getId() );
					b.append( " " );
					b.append( p.getId() );
					b.append( " " );
					b.append( p.getValue() );
					deleteTriples.add( b.toString() );
					// BUGFIX: the original used delete(0, length() - 1), which left the
					// LAST character in the buffer and corrupted every following triple
					b.setLength( 0 );
				}
			}
		}
		// TODO double address deleting

		return deleteTriples;
	}

	/*
	 * Maps predicates onto individuals, building the hash map of individuals
	 * (keyed by URI) together with their predicates.
	 */
	private void process( String realestateType ) {
		long time = System.currentTimeMillis();
		RepositoryConnection con = null;
		GraphQueryResult result = null;
		try {
			System.out.println( "Zaciatok spracovanie vysledkov processu " + realestateType );
			con = DatabaseManager.getInstance().getRepository().getConnection();
			String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>  " + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
					+ "CONSTRUCT { ?a ?p ?b }  WHERE { ?a rdf:type <" + REALESTATE_NS + realestateType + "> ."
					+ " ?a ?p ?b }";
			GraphQuery g = con.prepareGraphQuery( QueryLanguage.SPARQL, query );
			g.setIncludeInferred( false );
			result = g.evaluate();

			int rows = 0;
			while ( result.hasNext() ) {
				Statement s = result.next();
				String individualURI = s.getSubject().stringValue();
				Individual i = getIndividuals().get( individualURI );
				if ( i != null ) {
					i.addElement( s );
				} else {
					getIndividuals().put( individualURI, new Individual( individualURI, s ) );
				}
				rows++;
			}

			System.out.println( "Query time + processing : " + ( System.currentTimeMillis() - time ) + "ms\nPocet individui " + getIndividuals().size() + "\nPocet zaznamov:" + rows );

		} catch ( RepositoryException e ) {
			e.printStackTrace();
		} catch ( NumberFormatException e ) {
			e.printStackTrace();
		} catch ( QueryEvaluationException e ) {
			e.printStackTrace();
		} catch ( MalformedQueryException e ) {
			e.printStackTrace();
		} finally {
			// BUGFIX: the original leaked both the query result and the connection
			// whenever any exception was thrown (close() calls were inside the try)
			if ( result != null ) {
				try {
					result.close();
				} catch ( QueryEvaluationException e ) {
					e.printStackTrace();
				}
			}
			if ( con != null ) {
				try {
					con.close();
				} catch ( RepositoryException e ) {
					e.printStackTrace();
				}
			}
		}
	}

	/** @return the map of processed individuals, keyed by individual URI */
	protected Map<String, Individual> getIndividuals() {
		return individuals;
	}
}
