package com.fluidops.datacatalog.reltables.acsDB;

import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.StopAnalyzer;
import org.openrdf.model.Literal;
import org.openrdf.query.BindingSet;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;

import com.fluidops.datacatalog.reltables.util.LRUCache;
import com.fluidops.datacatalog.reltables.util.stemmer.SnowballStemmer;
import com.fluidops.datacatalog.reltables.util.stemmer.englishStemmer;

/**
 * Facade over an RDF repository holding attribute correlation statistics
 * (an "ACSDB"): how often a column label occurs on its own and how often
 * two labels co-occur in the same schema. Labels are normalized (brackets,
 * punctuation and stop words removed, lower-cased) before querying, and
 * both the normalization and the single-label frequencies are LRU-cached.
 *
 * <p>All lookups are best-effort: repository/query errors are logged and a
 * frequency of 0 is returned. Not thread-safe (single shared connection and
 * unsynchronized caches).
 */
public class ACSDB {

	private static final Logger logger = Logger.getLogger(ACSDB.class.getName());

	/** Namespace prefixes of the subject nodes for single / combined occurrences. */
	public static final String SINGLE_SUBJECT_NODE = "http://acsDB.singleOccurrence/";
	public static final String COMBO_SUBJECT_NODE = "http://acsDB.comboOccurrence/";

	/** Predicates linking an occurrence node to the column label(s) it covers. */
	public static final String HAS_SINGLE_COLUMN = "http://acsDB/single/hasColumn/";
	public static final String HAS_COMBO_COLUMN = "http://acsDB/combo/hasColumn/";

	/** Predicates carrying the occurrence count / relative frequency of a node. */
	public static final String HAS_SCORE = "http://acsDB.hasScore/";
	public static final String HAS_PERCENTAGE = "http://acsDB.hasPercentage/";

	private Repository acsDB;
	// May stay null if opening the connection fails in the constructor;
	// all query paths then log and return 0.
	private RepositoryConnection con;

	private SnowballStemmer stemmer;
	private Set<String> stopWords;

	private static final int FREQUENCY_CACHE_SIZE = 1000;
	// Single-label frequencies, keyed by the RAW (uncleaned) schema string.
	private Map<String, Integer> singleFrequencyCache;

	private static final int CLEANING_CACHE_SIZE = 1000;
	// Raw schema string -> cleaned/normalized schema string.
	private Map<String, String> cleaningCache;

	/**
	 * Creates an ACSDB facade on top of the given (already initialized)
	 * repository. A connection is opened eagerly; on failure the error is
	 * logged and all frequency lookups will return 0.
	 *
	 * @param repo repository holding the attribute correlation statistics
	 */
	@SuppressWarnings("unchecked")
	public ACSDB(Repository repo){
		this.acsDB = repo;
		try {
			this.con = this.acsDB.getConnection();
		} catch (RepositoryException e) {
			logger.warn("Could not instance connection of the acsDB. ", e);
		}

		// The stemmer class is known at compile time, so instantiate it
		// directly instead of going through Class.forName()/newInstance()
		// reflection (which only obscured instantiation errors).
		this.stemmer = new englishStemmer();

		// Lucene's stop word set is untyped; the cast is unchecked but only
		// contains() lookups are performed on it.
		// NOTE(review): some Lucene versions back this with a CharArraySet --
		// confirm contains(String) behaves as expected on the classpath version.
		this.stopWords = (Set<String>) StopAnalyzer.ENGLISH_STOP_WORDS_SET;

		this.cleaningCache = new LRUCache<String, String>(CLEANING_CACHE_SIZE);
		this.singleFrequencyCache = new LRUCache<String, Integer>(FREQUENCY_CACHE_SIZE);
	}

	/**
	 * Returns how often the given column label occurs on its own.
	 * Results are LRU-cached under the raw, uncleaned schema string.
	 *
	 * @param schema raw column label
	 * @return occurrence count, or 0 if unknown or the query failed
	 */
	public int getSingleFrequency(final String schema){
		// Single get() instead of containsKey()+get(): avoids a double lookup
		// and is safe because 0-valued frequencies are cached as Integer 0,
		// never as null.
		Integer frequency = this.singleFrequencyCache.get(schema);
		if(frequency == null){
			frequency = querySingleFrequency(cleanSchema(schema));
			this.singleFrequencyCache.put(schema, frequency);
		}
		return frequency;
	}

	/**
	 * Returns how often the two given column labels occur together in one
	 * schema. The result itself is not cached, but the label cleaning is.
	 *
	 * @param schema1 raw label of the first column
	 * @param schema2 raw label of the second column
	 * @return co-occurrence count, or 0 if unknown or the query failed
	 */
	public int getPairwiseFrequency(final String schema1, final String schema2){
		String cleanedSchema1 = cleanSchema(schema1);
		String cleanedSchema2 = cleanSchema(schema2);

		// queryPairwiseFrequency(...) is the unoptimized fallback that sums
		// client-side; the optimized variant pushes the aggregation to SPARQL.
		return queryPairwiseFrequencyOptimized(cleanedSchema1, cleanedSchema2);
	}

	/**
	 * Looks up the score of the single-occurrence node for the given cleaned
	 * label. The label is embedded verbatim into the SPARQL query; this is
	 * safe because cleanSchema() strips all punctuation (incl. quotes).
	 */
	private int querySingleFrequency(String schema){
		/*
		 * possible optimization hot spot...
		 * could be done by adding a special property for combo has_column
		 * omitting the check for single subject nodes...
		 */
		StringBuilder query = new StringBuilder();
		query.append("SELECT ?score ");
		query.append(" WHERE { ");
		query.append(" ?node <").append(HAS_SINGLE_COLUMN).append("> \"").append(schema).append("\" ; ");
		query.append("<").append(HAS_SCORE).append("> ?score . } LIMIT 1 ");

		return sumIntBindings(query.toString(), "score");
	}

	/**
	 * Sums the scores of all combo nodes containing both cleaned labels,
	 * letting the endpoint aggregate (SUM/GROUP BY) so only per-node totals
	 * travel over the wire.
	 */
	private int queryPairwiseFrequencyOptimized(String cleanedSchema1, String cleanedSchema2){
		StringBuilder query = new StringBuilder();
		query.append("SELECT (SUM(?score) AS ?totalScore) ");
		query.append(" WHERE { ");
		query.append(" ?node <").append(HAS_COMBO_COLUMN).append("> \"").append(cleanedSchema1).append("\" ; ");
		query.append(" <").append(HAS_COMBO_COLUMN).append("> \"").append(cleanedSchema2).append("\" ; ");
		query.append("<").append(HAS_SCORE).append("> ?score . } GROUP BY ?node ");

		return sumIntBindings(query.toString(), "totalScore");
	}

	/**
	 * Unoptimized variant of
	 * {@link #queryPairwiseFrequencyOptimized(String, String)} that fetches
	 * every matching score row and sums client-side. Kept as a fallback for
	 * endpoints without SPARQL aggregate support (currently unused).
	 */
	private int queryPairwiseFrequency(String cleanedSchema1, String cleanedSchema2){
		StringBuilder query = new StringBuilder();
		query.append("SELECT ?score ");
		query.append(" WHERE { ");
		query.append(" ?node <").append(HAS_COMBO_COLUMN).append("> \"").append(cleanedSchema1).append("\" ; ");
		query.append(" <").append(HAS_COMBO_COLUMN).append("> \"").append(cleanedSchema2).append("\" ; ");
		query.append("<").append(HAS_SCORE).append("> ?score . } ");

		return sumIntBindings(query.toString(), "score");
	}

	/**
	 * Executes the given SPARQL SELECT query and sums the integer values
	 * bound to {@code bindingName} over all result rows. Errors are logged
	 * (always including the exception) and the partial sum -- usually 0 --
	 * is returned, preserving the best-effort contract of the callers.
	 *
	 * @param query       complete SPARQL SELECT query string
	 * @param bindingName name of the integer-valued binding to accumulate
	 */
	private int sumIntBindings(String query, String bindingName){
		int sum = 0;
		try {
			TupleQuery tquery = this.con.prepareTupleQuery(QueryLanguage.SPARQL, query);
			TupleQueryResult result = null;
			try{
				result = tquery.evaluate();
				while(result.hasNext()){
					BindingSet bs = result.next();
					sum += ((Literal) bs.getValue(bindingName)).intValue();
				}
			}finally{
				// always release the result iteration, even on evaluation errors
				if(result != null){
					result.close();
				}
			}
		} catch (RepositoryException e) {
			logger.warn("RepositoryException while querying the acsDB ", e);
		} catch (MalformedQueryException e) {
			logger.warn("Malformed query for acsDB " + query, e);
		} catch (QueryEvaluationException e) {
			logger.warn("Could not evaluate query to acsDB ", e);
		}

		return sum;
	}

	/**
	 * Normalizes a raw column label: bracketed text is removed, the string is
	 * lower-cased, punctuation is stripped, whitespace is collapsed, stop
	 * words are dropped and the result is trimmed. Results are LRU-cached.
	 */
	private String cleanSchema(final String schema){
		String cleanedSchema = this.cleaningCache.get(schema);
		if(cleanedSchema == null){
			cleanedSchema = deleteBrackets(schema);
			// Locale.ROOT keeps lower-casing stable regardless of the JVM's
			// default locale (e.g. the Turkish dotless 'i').
			cleanedSchema = cleanedSchema.toLowerCase(Locale.ROOT);
			cleanedSchema = cleanedSchema.replaceAll("\\p{Punct}", "");
			cleanedSchema = cleanedSchema.replaceAll("\\s+", " ");
			cleanedSchema = removeStopWords(cleanedSchema);
	//		cleanedSchema = stem(cleanedSchema);
			cleanedSchema = cleanedSchema.trim();

			this.cleaningCache.put(schema, cleanedSchema);
		}
		return cleanedSchema;
	}

	/** Removes all stop words from the given space-separated string. */
	private String removeStopWords(String s){
		StringBuilder sb = new StringBuilder();
		for(String word : s.split(" ")){
			if(!this.stopWords.contains(word)){
				sb.append(word).append(' ');
			}
		}
		if(sb.length() > 0){
			// delete trailing space
			sb.deleteCharAt(sb.length() - 1);
		}
		return sb.toString();
	}

	/** Applies the Snowball stemmer to the given string (currently unused). */
	private String stem(String s){
		this.stemmer.setCurrent(s);
		this.stemmer.stem();
		return this.stemmer.getCurrent();
	}

	/**
	 * Removes every "(...)" span -- brackets and content -- from the string.
	 * The closing bracket is searched only AFTER the opening one; the previous
	 * implementation took the first ')' anywhere in the string, which threw a
	 * StringIndexOutOfBoundsException on inputs like ")a(b)". Unmatched
	 * brackets are left in place.
	 */
	private String deleteBrackets(final String s){
		StringBuilder sb = new StringBuilder(s);
		int open = sb.indexOf("(");

		while(open != -1){
			int close = sb.indexOf(")", open);
			if(close == -1){
				// unmatched '(' -- nothing more to delete
				break;
			}
			sb.delete(open, close + 1);

			// still any brackets left?
			open = sb.indexOf("(");
		}
		return sb.toString();
	}

	/**
	 * Closes the connection (if one was ever opened) and shuts the repository
	 * down. Safe to call even if the constructor failed to connect.
	 */
	public void shutdown(){
		try {
			// con is null when the constructor could not open a connection
			if(this.con != null && this.con.isOpen()){
				this.con.close();
			}
			this.acsDB.shutDown();
		} catch (RepositoryException e) {
			logger.warn("Could not shutdown the acsDB. ", e);
		}
	}
}
