package org.nlp2rdf.navigator.server.tiger;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.dllearner.algorithms.refinement2.ROLComponent2;
import org.dllearner.core.EvaluatedDescription;
import org.dllearner.core.KnowledgeSource;
import org.dllearner.core.LearningAlgorithm;
import org.dllearner.core.LearningProblem;
import org.dllearner.core.ReasonerComponent;
import org.dllearner.core.configurators.ComponentFactory;
import org.dllearner.kb.OWLFile;
import org.dllearner.kb.sparql.Cache;
import org.dllearner.kb.sparql.SPARQLTasks;
import org.dllearner.kb.sparql.SparqlEndpoint;
import org.dllearner.kb.sparql.SparqlQueryDescriptionConvertVisitor;
import org.dllearner.learningproblems.PosNegLPStandard;
import org.dllearner.parser.ParseException;
import org.dllearner.reasoning.FastInstanceChecker;
import org.dllearner.utilities.Files;
import org.dllearner.utilities.URLencodeUTF8;
import org.dllearner.utilities.experiments.Examples;
import org.nlp2rdf.navigator.server.WorkerServletWrapper;
import org.nlp2rdf.navigator.util.LogHelper;
import org.slf4j.LoggerFactory;

import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.sparql.resultset.ResultSetRewindable;
import com.ibm.icu.util.StringTokenizer;

/**
 * Worker backing the NLP2RDF navigator: searches sentences in the Tiger corpus
 * via a Virtuoso SPARQL endpoint, learns a class expression from positive /
 * negative example sentences with DL-Learner, and retrieves further sentences
 * matching the learned expression.
 *
 * NOTE(review): user-supplied search strings are concatenated directly into
 * SPARQL queries (searchFulltext / searchLemmas) — injection risk if this is
 * ever exposed to untrusted input; consider escaping or parameterizing.
 */
public class Worker2 implements IWorker2 {
	private static final Logger logger = Logger.getLogger(Worker2.class);
	
	// Debug/feature toggles. 'debug' was meant to force a fallback result in
	// getSentences (see _getLabelsMakeObjects); 'reasoning' and 'useExpansion'
	// are currently unused experiment switches.
	static boolean debug = true;
	static boolean reasoning = false;
	static boolean useExpansion = false;
	
	// Endpoint handle and the query helper (with cache) used for all SPARQL access.
	SparqlEndpoint sparqlEndpoint;
	SPARQLTasks sparqlTasks;
	
	static String sparqlEndpointURL = "http://db0.aksw.org:8893/sparql";
	static String graph = "http://nlp2rdf.org/tiger";
	// Virtuoso rule graph used for backward-chaining inference (see the
	// "define input:inference" pragma in _retrieveSentenceURIs).
	static String rulegraph = "http://nlp2rdf.org/schema/rules1";
	
	// Vocabulary URIs of the NLP2RDF ontology.
	static String hasLemma = "http://nlp2rdf.org/ontology/hasLemma";
	static String hasToken = "http://nlp2rdf.org/ontology/hasToken";
	static String sentenceClass = "http://nlp2rdf.org/ontology/Sentence";
	static String tokenClass = "http://nlp2rdf.org/ontology/Token";
	
	// Alternative background-knowledge files kept for reference:
//	static String backgroundXML = "files/tiger.noSchema.noImports.rdf";
//	static String backgroundXML = "../data/tiger_trimmed_toPOS.rdf";	
	static String backgroundXML = "../data/VirtuosoSyntaxSchema.rdf";	
	static String sentenceXMLFolder = "../data/tiger/";	
	static String labelsFile = "../data/labels.txt";
	
//    static Map<String,String> subclassExpansion = Closure.makeFilter();	
	// sentence URI -> rdfs:label, loaded once from a tab-separated dump file.
    static Map<String,String> labels = loadLabels(labelsFile);	
	// concept name (e.g. "VVPP") -> human-readable tooltip text.
    static Map<String,String> conceptLabels = OntologyLabels.getLabels();	
    
    private static org.slf4j.Logger l = LoggerFactory.getLogger(WorkerServletWrapper.class);
 
    /**
     * Creates a worker connected to the configured endpoint/graph. Failures
     * (bad URL, cache init) are logged and leave the fields null — callers of
     * the query methods will then fail; kept best-effort as before.
     */
    public Worker2(){
    	try{
    		this.sparqlEndpoint = new SparqlEndpoint(
    				new URL(sparqlEndpointURL),
    				new ArrayList<String>(Arrays.asList(new String[]{graph})),
    				new ArrayList<String>());
    		this.sparqlTasks = new SPARQLTasks(Cache.getDefaultCache(),sparqlEndpoint);
    	}catch (Exception e) {
    		logger.error("could not initialize SPARQL endpoint "+sparqlEndpointURL, e);
    	}
    }
    
    /**
     * Loads the URI-to-label map from a tab-separated file (one "uri\tlabel"
     * pair per line). Malformed lines are logged and skipped; a missing or
     * unreadable file yields an empty map (best-effort, as before).
     *
     * @param file path to the tab-separated labels dump
     * @return map from sentence URI to rdfs:label, possibly empty, never null
     */
    private static  Map<String,String> loadLabels(String file){
    	Map<String,String> labels = new HashMap<String, String>();
    	try{
    		String[] lines = Files.readFileAsArray(new File(file));
    		for (int i = 0; i < lines.length; i++) {
				try{
    			StringTokenizer st = new StringTokenizer(lines[i],"\t");
				String uri = st.nextToken();
				String label = st.nextToken();
				labels.put(uri, label);
				}catch (Exception e) {
					// a line without two tab-separated fields; skip it
					logger.warn("malformed line "+i+" in "+file, e);
				}
			}
    	}catch (Exception e) {
			logger.error("could not read labels file "+file, e);
		}
    	return labels;
		
    }
	
	/**
	 * Demo driver: full-text and lemma search, learn a class expression from
	 * the lemma search results, then retrieve new sentences matching it.
	 */
	public static void main(String[] args) {
//		LogHelper.initLoggers();
		
		// runtime limits for learning and retrieval
		int maxSeconds = 10;
		int maxResults = 5;
		
		IWorker2 w = new Worker2();
		
		System.out.println(w.getLabelForURI("http://nlp2rdf.org/ontology/s50456"));
		
		SearchResult sr =  w.searchFulltext("wurde", maxResults);
		SearchResult sr2 =  w.searchLemmas("werden", maxResults);
		
		System.out.println(sr);
		System.out.println(sr2);
		
		// normally the user chooses examples; here we just take all
		EvaluatedDescription d = w.learn(sr2.matching, sr2.notMatching, maxSeconds);
		// learn() swallows exceptions and returns null on failure — guard
		// before dereferencing (previously an unchecked NPE here).
		if (d == null) {
			System.out.println("learning failed, no description found");
			return;
		}

		// the next lines can be presented to the user
		System.out.println(d.getAccuracy());
		System.out.println(d.getDescription().toManchesterSyntaxString(Config.prefix, null));
		System.out.println(d.getDescription().toKBSyntaxString());
		
		List<Sentence> retrievedSentences = w.getSentences(d, maxResults);
		// NOTE(review): removeAll relies on Sentence.equals/hashCode — if those
		// are not overridden this silently removes nothing (matches the old
		// "does not work, don't know why" comment); verify Sentence's equals.
		retrievedSentences.removeAll(sr2.matching);
		retrievedSentences.removeAll(sr2.notMatching);
		
		for (int i = 0; i < 5 && i<retrievedSentences.size(); i++) {
			System.out.println(retrievedSentences.get(i));
		}
	}
	
	/* (non-Javadoc)
	 * @see org.nlp2rdf.navigator.server.IWorker2#searchFulltext(java.lang.String, int)
	 */
	public SearchResult searchFulltext(String search, int nrOfResults){
		
		String pre = "SELECT ?sentence ?label " +
				"FROM <"+graph+"> " +
				" { " +
				"?sentence a <"+sentenceClass+"> . " +
				"?sentence rdfs:label ?label. ";
		String post = "} LIMIT "+nrOfResults;
		
		// bif:contains is the Virtuoso free-text extension; the inner "..."
		// makes it a phrase match.
		String posQuery = pre +"FILTER (bif:contains (?label, '\""+search+"\"')) . "+post;
		// NOTE(review): the negative filter regex includes the literal quotes,
		// so it likely matches no label and !regex accepts (almost) every
		// sentence — i.e. the negatives are effectively arbitrary sentences.
		// Possibly intentional as cheap negatives; confirm before changing.
		String negQuery = pre +"FILTER (!regex (?label, '\""+search+"\"')) . "+post;

		List<Sentence> pos =_queryHelper(posQuery);
		List<Sentence> neg =_queryHelper(negQuery);
		return new SearchResult(pos, neg);
	}
	
	/**
	 * Runs a query expected to bind ?sentence (URI) and ?label (literal) and
	 * turns each solution into a Sentence; other bindings are skipped.
	 */
	private List<Sentence> _queryHelper(String query){
			List<Sentence> ret = new ArrayList<Sentence>();
			ResultSetRewindable rsw =  sparqlTasks.queryAsResultSet(query);
			while (rsw.hasNext()){
				QuerySolution qs = rsw.next();
				RDFNode sentence = qs.get("sentence");
				RDFNode label = qs.get("label");
				if(sentence.isURIResource() && label.isLiteral()){
					ret.add(new Sentence(((Literal)label).getLexicalForm(), ((Resource)sentence).getURI()));
				}
			}
			return ret;
	}
	
	/* (non-Javadoc)
	 * @see org.nlp2rdf.navigator.server.IWorker2#searchLemmas(java.lang.String, int)
	 */
	public SearchResult searchLemmas(String search, int nrOfResults){
		String pre = "SELECT DISTINCT ?sentence ?label " +
			"FROM <"+graph+"> " +
			" { " +
			"?sentence a <"+sentenceClass+"> . " +
			"?sentence rdfs:label ?label. " +
			"?sentence <"+hasToken+"> ?token . " +
			"?token a <"+tokenClass+"> . " +
			"?token <"+hasLemma+"> ?lemma. ";
		String post = "} LIMIT "+nrOfResults;
		// same positive/negative filter scheme as searchFulltext, but on the
		// token lemmas instead of the sentence label (see notes there about
		// the quoting in the negative regex).
		String posQuery = pre +"FILTER (bif:contains (?lemma, '\""+search+"\"')) . "+post;
		String negQuery = pre +"FILTER (!regex (?lemma, '\""+search+"\"')) . "+post;
		
		List<Sentence> pos =_queryHelper(posQuery);
		List<Sentence> neg =_queryHelper(negQuery);
		return new SearchResult(pos, neg);
	}
	
	/**
	 * Maps sentence URIs to file:// URLs of their per-sentence RDF/XML dumps
	 * (DL-Learner takes file URLs as knowledge sources). URIs whose file name
	 * cannot be turned into a URL are logged and skipped.
	 *
	 * @param exampleURIs sentence URIs
	 * @return file URLs under {@link #sentenceXMLFolder}
	 */
	private static List<URL> sentencesToFileURLs(List<String> exampleURIs){
		String baseDir=sentenceXMLFolder;
		List<URL> u = new ArrayList<URL>();
		for (String exampleURI : exampleURIs) {
			try {
				u.add(new File(toFileName(baseDir, exampleURI)).toURI().toURL());
			} catch (MalformedURLException e) {
				logger.error("could not build file URL for "+exampleURI, e);
			}
		}
		return u;
	}
	
	/** File name of a sentence dump: base dir + URL-encoded sentence URI. */
	private static String toFileName(String baseDir, String exampleURI){
		return baseDir(baseDir)+URLencodeUTF8.encode(exampleURI);
	}

	/** Ensures the base directory path ends with a file separator. */
	private static String baseDir(String baseDir){
		return (baseDir.endsWith(File.separator))?baseDir:baseDir+File.separator;
	}
	
	/* (non-Javadoc)
	 * @see org.nlp2rdf.navigator.server.IWorker2#collect(java.util.List)
	 */
	@Deprecated
	public File collect(List<URL> urls){
		File tmpFile = null;
		FileWriter fw = null;
		try{
			tmpFile = File.createTempFile(Worker2.class.getSimpleName(), null);
			fw = new FileWriter(tmpFile);
			OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM, ModelFactory.createDefaultModel());
			for(URL one: urls){
				model.read(one.toString());
			}
			model.write(fw);
			
		}catch (Exception e) {
			logger.error("could not collect models into temp file", e);
		}finally{
			// fw is null when createTempFile/FileWriter threw — previously
			// this finally block itself NPE'd and masked the real exception.
			if (fw != null) {
				try {
					fw.close();
				} catch (IOException e) {
					logger.error("could not close temp file writer", e);
				}
			}
		}
		
		return tmpFile;
	}
	
	/* (non-Javadoc)
	 * @see org.nlp2rdf.navigator.server.IWorker2#learn(java.util.List, java.util.List, int)
	 */
	public EvaluatedDescription learn(List<Sentence> pos, List<Sentence> neg, int maxSeconds){
		
		List<String> exampleURIs = new ArrayList<String>();
		EvaluatedDescription result = null;
		Examples ex = new Examples();
		for(Sentence ps : pos){
			ex.addPosTrain(ps.uri);
			exampleURIs.add(ps.uri);
		}
		for(Sentence ns : neg){
			ex.addNegTrain(ns.uri);
			exampleURIs.add(ns.uri);
		}
		
		Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>();
		
		try{
			// knowledge sources: one RDF file per example sentence plus the
			// shared background schema
			List<URL> urls = sentencesToFileURLs(exampleURIs);
			URL add = new File(backgroundXML).toURI().toURL();
			urls.add(add);

			for(URL u: urls){
				OWLFile ks = ComponentFactory.getOWLFile(u);
				tmp.add(ks);
			}
			
			FastInstanceChecker rc = ComponentFactory.getFastInstanceChecker(tmp);
			PosNegLPStandard lp = ComponentFactory.getPosNegLPStandard(rc, ex.getPositiveExamples(), ex.getNegativeExamples());
			LearningAlgorithm la = _getROLLearner(lp,  rc,  ex, maxSeconds);
			
			// DL-Learner requires init() in dependency order before start()
			for(KnowledgeSource ks:tmp){
				ks.init();
			}
			rc.init();
			lp.init();
			la.init();
			la.start();

			result = la.getCurrentlyBestEvaluatedDescription();
		}catch (Exception e) {
			// best-effort: callers must handle a null result
			logger.error("learning failed", e);
		}
		return result;
	}
	
	/**
	 * Converts the learned description into a SPARQL query (with Virtuoso
	 * rule-graph inference enabled) and returns the URIs of matching
	 * sentences. Shared by getSentences and getSentencesNew, which previously
	 * duplicated this code.
	 *
	 * @param ed          learned class expression
	 * @param sparqlLimit LIMIT applied in the SPARQL query; -1 for unlimited
	 * @return URIs bound to ?subject in the generated query
	 */
	private List<String> _retrieveSentenceURIs(EvaluatedDescription ed, int sparqlLimit){
		SparqlQueryDescriptionConvertVisitor visit = new SparqlQueryDescriptionConvertVisitor();
		visit.setDistinct(true);
		visit.setLabels(false);
		visit.setLimit(sparqlLimit);

		String sparqlQuery = ""; 
		try {
			sparqlQuery = visit.getSparqlQuery(ed.getDescription().toKBSyntaxString());
		} catch (ParseException e1) {
			// fall through with the bare inference pragma; the endpoint query
			// will then return nothing (best-effort, as before)
			logger.error("could not convert description to SPARQL", e1);
		}
		// Virtuoso pragma: evaluate the query under the configured rule graph
		sparqlQuery = " \n define input:inference \""+rulegraph+"\" \n" + ""+sparqlQuery;

		List<String> sentenceURIs = new ArrayList<String>();
		ResultSetRewindable rsw = sparqlTasks.queryAsResultSet(sparqlQuery);
		while (rsw.hasNext()){
			QuerySolution qs = rsw.next();
			RDFNode sentence = qs.get("subject");
			if(sentence.isURIResource() ){
				sentenceURIs.add(((Resource)sentence).getURI());
			}else{
				logger.warn("getSentences retrieved something strange: "+sparqlQuery);
			}
		}
		return sentenceURIs;
	}
	
	/* (non-Javadoc)
	 * @see org.nlp2rdf.navigator.server.IWorker2#getSentences(org.dllearner.core.EvaluatedDescription, int)
	 */
	public List<Sentence> getSentences(EvaluatedDescription ed, int resultLimit){
		// limit applied server-side in the SPARQL query
		List<String> sentenceURIs = _retrieveSentenceURIs(ed, resultLimit);
		List<Sentence> sentences = new ArrayList<Sentence>();
		for(String uri : sentenceURIs){
			sentences.add(new Sentence(getLabelForURI(uri), uri));
		}
		return sentences;
	}
	
	/* (non-Javadoc)
	 * @see org.nlp2rdf.navigator.server.IWorker2#getSentencesNew(org.dllearner.core.EvaluatedDescription, int)
	 */
	public RetrievalResult getSentencesNew(EvaluatedDescription ed, int resultLimit){
		// unlimited query so the total hit count can be reported; the result
		// list itself is cut to resultLimit client-side
		List<String> sentenceURIs = _retrieveSentenceURIs(ed, -1);
		
		RetrievalResult retrievalResult = new RetrievalResult();
		retrievalResult.setNrOfResults(sentenceURIs.size());
		List<Sentence> sentences = new ArrayList<Sentence>();
		for(int i = 0; i< resultLimit && i< sentenceURIs.size(); i++){
			sentences.add(new Sentence(getLabelForURI(sentenceURIs.get(i)), sentenceURIs.get(i)));
		}
		retrievalResult.setSentences(sentences);
		return retrievalResult;
	}
	
	/**
	 * Looks up the cached rdfs:label of a sentence URI.
	 *
	 * @param uri sentence URI
	 * @return the label, or "" if unknown
	 */
	public String getLabelForURI(String uri){
		String label = labels.get(uri);
		return (label==null)?"":label;
	}
	
	/**
	 * returns the labels for displaying as tooltip
	 * 
	 * @param name e.g. VVPP, POS, KOUS
	 * @return the label, or "" if unknown
	 */
	public String getLabelsForConcept(String name){
		String label = conceptLabels.get(name);
		return (label==null)?"":label;
	}
	
	/**
	 * Fetches rdfs:labels for the given sentence URIs, one query each; only
	 * the first 50 URIs are resolved, the rest get a placeholder label (kept
	 * as-is — apparently to bound query load). Currently unused.
	 */
	@SuppressWarnings("unused")
	private List<Sentence> _getLabelsMakeObjects(List<String> sentenceURIs){
		String query = "";
		List<Sentence> result = new ArrayList<Sentence>();
		int i = 0;
		for (String sentenceURI : sentenceURIs) {
			if(i>50){
				result.add(new Sentence("Something, Something, Something, Dark Side", sentenceURI));
			}else{
				query = "SELECT * FROM <"+graph+"> " +
				"{ <"+sentenceURI+"> rdfs:label ?label . }";
				ResultSetRewindable rsw = sparqlTasks.queryAsResultSet(query);
				while (rsw.hasNext()){
					QuerySolution qs = rsw.next();
					RDFNode label = qs.get("label");
					if(label.isLiteral()){
						result.add(new Sentence(((Literal)label).getLexicalForm(), sentenceURI));
					}else{
						logger.warn("_getLabelsMakeObjects retrieved something strange: "+query);
					}
				}
			}
			i++;
		}
		return result;
	}
	
	/**
	 * Configures a DL-Learner ROLComponent2 learner for sentence descriptions:
	 * existential restrictions and data-hasValue only, no noise tolerated,
	 * starting from the Sentence class, with punctuation concepts ignored.
	 *
	 * @param lp               the learning problem (pos/neg examples)
	 * @param rc               the reasoner over the knowledge sources
	 * @param ex               examples, used to derive the value frequency threshold
	 * @param maxExecutionTime hard time limit in seconds
	 * @return the configured (not yet initialized) learning algorithm
	 * @throws Exception if the component cannot be created
	 */
	private LearningAlgorithm _getROLLearner(LearningProblem lp, ReasonerComponent rc, Examples ex, int maxExecutionTime) throws Exception {
		ROLComponent2 la = ComponentFactory.getROLComponent2(lp, rc);
		// a datatype value must occur in every positive example to be used
		// (duplicate assignment of this variable removed)
		int valueFrequencyThreshold = ex.getPosTrain().size();
		
		la.getConfigurator().setUseExistsConstructor(true);
		la.getConfigurator().setUseAllConstructor(false);
		la.getConfigurator().setUseCardinalityRestrictions(false);
		la.getConfigurator().setUseNegation(false);
		la.getConfigurator().setUseHasValueConstructor(false);
		la.getConfigurator().setUseDataHasValueConstructor(true);
		la.getConfigurator().setValueFrequencyThreshold(valueFrequencyThreshold);
		
		la.getConfigurator().setNoisePercentage(0);
		la.getConfigurator().setTerminateOnNoiseReached(true);
		
		la.getConfigurator().setStartClass(Config.prefix+"Sentence");
		la.getConfigurator().setMaxExecutionTimeInSeconds(maxExecutionTime);

//		 la.getConfigurator().setWriteSearchTree(true);
//		la.getConfigurator().setSearchTreeFile("log/searchTree.txt");
//		la.getConfigurator().setReplaceSearchTree(true);

		// punctuation tags carry no linguistic signal for learned descriptions
		la.getConfigurator().setIgnoredConcepts(new HashSet<String>(Arrays.asList(new String[]{
				"http://nlp2rdf.org/ontology/sentencefinalpunctuation_tag",
				"http://nlp2rdf.org/ontology/comma_tag",
				"http://nachhalt.sfb632.uni-potsdam.de/owl/stts.owl#SentenceFinalPunctuation",
				"http://nachhalt.sfb632.uni-potsdam.de/owl/stts.owl#SentenceInternalPunctuation"
		})));
		
		return la;
	}
	
}
