package es.unavarra.mti.iws.engine;

import java.io.FileWriter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;

import es.unavarra.mti.iws.EngineInterface;
import es.unavarra.mti.iws.gui.MainFrame;

/**
 * Semantic engine for the sensor-network demo: maintains an in-memory OWL
 * model of nodes, their links and their sensor readings, and answers
 * SPARQL queries (including the fire / flood / quake alarm queries).
 *
 * <p>Thread-safety: all methods that read or mutate the shared {@link OntModel}
 * are {@code synchronized} on this instance, so queries never race mutations.
 */
public class Engine implements EngineInterface {
    /** Classpath folder that holds the ontology resource. */
    private static final String path = "/es/unavarra/mti/iws/resources/";
    /** Namespace of the RedDeNodos ontology; used as the foaf: prefix in queries. */
    private static final String defaultNameSpace = "http://www.semanticweb.org/ontologies/2010/10/RedDeNodos.owl#";
    // SPARQL FILTER expressions for the alarm thresholds (units per the ontology).
    private static final String LimitBurned = ">=60";
    private static final String LimitWet = ">=100";
    private static final String LimitFallen = ">45";

    /** In-memory OWL model of the sensor network; created in {@link #init()}. */
    private OntModel model;

    /** Stream the ontology is read from during {@link #init()}. */
    private InputStream in;

    public Engine() {}

    /**
     * Loads the RedDeNodos ontology from the classpath into a fresh
     * in-memory OWL model and logs readiness.
     */
    @Override
    public void init() {
        in = Engine.class.getResourceAsStream(path + "RedDeNodos.owl");
        // create an empty model
        model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        // read the RDF/XML file
        model.read(in, "");
        // FIX: the resource stream was previously leaked; close it once read.
        try {
            if (in != null) {
                in.close();
            }
        } catch (Exception ignored) {
            // best-effort close: the model has already been loaded
        }
        MainFrame.getInstance().log("Engine init");
    }

    /**
     * Creates a node individual with the fixed hardware specs of its type.
     *
     * @param id   local name of the new node (appended to the ontology URI)
     * @param type {@code "Imote2"} creates an IMOTE2 individual; any other
     *             value falls through to IRIS (original behavior, kept)
     */
    public synchronized void addNode(String id, String type) {
        if (type.equals("Imote2")) {
            model.createIndividual(Vocabulary.getURI() + id, Vocabulary.IMOTE2).
                    addProperty(Vocabulary.hasSdramMemory, "32000", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasFlashMemory, "32000", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasSramMemory, "256", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasTasaDatosTi, "250", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasSensibilidadTi, "-94", Vocabulary.getRDFDatatype("integer"));
        } else {
            model.createIndividual(Vocabulary.getURI() + id, Vocabulary.IRIS).
                    addProperty(Vocabulary.hasRam, "8", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasProgramFlashMemory, "128", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasSerialFlash, "512", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasEeprom, "4", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasTasaDatos, "250", Vocabulary.getRDFDatatype("integer")).
                    addProperty(Vocabulary.hasSensibilidad, "-101", Vocabulary.getRDFDatatype("integer"));
        }
        MainFrame.getInstance().log("Added " + id + " type " + type);
    }

    /**
     * Removes the node individual and, via Jena's {@code OntResource.remove()},
     * every statement that refers to it.
     * NOTE(review): assumes {@code id} names an existing node — an unknown id
     * would NPE on {@code getIndividual}; confirm callers guarantee this.
     */
    public synchronized void delNode(String id) {
        model.getIndividual(Vocabulary.getURI() + id).remove();
        MainFrame.getInstance().log("Deleted " + id);
    }

    /** Records a bidirectional neighbor relation between two existing nodes. */
    public synchronized void addLink(String id1, String id2) {
        model.getIndividual(Vocabulary.getURI() + id1).addProperty(Vocabulary.isVecinoOf, model.getOntResource(Vocabulary.getURI() + id2));
        model.getIndividual(Vocabulary.getURI() + id2).addProperty(Vocabulary.isVecinoOf, model.getOntResource(Vocabulary.getURI() + id1));
        MainFrame.getInstance().log("Link  " + id1 + " <-> " + id2);
    }

    /** Removes the neighbor relation in both directions. */
    public synchronized void delLink(String id1, String id2) {
        model.getIndividual(Vocabulary.getURI() + id1).removeProperty(Vocabulary.isVecinoOf, model.getOntResource(Vocabulary.getURI() + id2));
        model.getIndividual(Vocabulary.getURI() + id2).removeProperty(Vocabulary.isVecinoOf, model.getOntResource(Vocabulary.getURI() + id1));
        MainFrame.getInstance().log("Unlink  " + id1 + " <-> " + id2);
    }

    /**
     * Sets (or updates) a float-valued datatype property on a node.
     * Consolidates the update-or-add logic previously triplicated across
     * setTemp / setRain / setTilt. Callers hold the instance lock.
     */
    private void setFloatValue(String id, Property property, float value) {
        Individual node = model.getIndividual(Vocabulary.getURI() + id);
        if (node.hasProperty(property)) {
            node.getProperty(property).changeLiteralObject(value);
        } else {
            node.addProperty(property, String.valueOf(value), Vocabulary.getRDFDatatype("float"));
        }
    }

    /** Stores the latest temperature reading of a node. */
    public synchronized void setTemp(String id, float temp) {
        setFloatValue(id, Vocabulary.hasTemperatura, temp);
        MainFrame.getInstance().log("Temperatura  " + id + " = " + temp);
    }

    /** Stores the latest humidity (rain) reading of a node. */
    public synchronized void setRain(String id, float wet) {
        setFloatValue(id, Vocabulary.hasHumedad, wet);
        MainFrame.getInstance().log("Lluvia  " + id + " = " + wet);
    }

    /** Stores the latest inclination (tilt) reading of a node. */
    public synchronized void setTilt(String id, float tilt) {
        setFloatValue(id, Vocabulary.hasInclinacion, tilt);
        MainFrame.getInstance().log("Terremoto  " + id + " = " + tilt);
    }

    /**
     * Runs a SELECT query and returns the string form of one bound variable
     * per solution.
     *
     * @param queryString complete SPARQL query text
     * @param param       variable name (without '?') to extract
     * @return string values of {@code param}, one per result row
     */
    public synchronized List<String> querySPARQL(String queryString, String param) {
        List<String> list = new ArrayList<String>();
        Query query = QueryFactory.create(queryString);
        QueryExecution qe = QueryExecutionFactory.create(query, model);
        try {
            ResultSet rs = qe.execSelect();
            while (rs.hasNext()) {
                QuerySolution binding = rs.nextSolution();
                list.add(binding.get(param).toString());
            }
        } finally {
            // FIX: previously leaked if execSelect()/binding access threw
            qe.close();
        }
        return list;
    }

    /**
     * Runs a SELECT query and returns two bound variables per solution.
     * An empty variable name yields "" in that column (as before).
     *
     * @return one [param1, param2] pair per result row
     */
    public synchronized List<List<String>> querySPARQL(String queryString, String param1, String param2) {
        List<List<String>> list = new ArrayList<List<String>>();
        Query query = QueryFactory.create(queryString);
        QueryExecution qe = QueryExecutionFactory.create(query, model);
        try {
            ResultSet rs = qe.execSelect();
            while (rs.hasNext()) {
                QuerySolution binding = rs.nextSolution();
                List<String> inside = new ArrayList<String>();
                inside.add(param1.isEmpty() ? "" : binding.get(param1).toString());
                inside.add(param2.isEmpty() ? "" : binding.get(param2).toString());
                list.add(inside);
            }
        } finally {
            // FIX: previously leaked if execSelect()/binding access threw
            qe.close();
        }
        return list;
    }

    /**
     * Builds a single-pattern SELECT query from GUI selections.
     * NOTE(review): an unrecognized predicate leaves {@code p} empty, which
     * produces an invalid query — original behavior, kept; confirm the GUI
     * only passes the five known predicate labels.
     *
     * @param subjectCheck include the subject in the SELECT clause
     * @param objectCheck  include the object in the SELECT clause
     * @param predicate    GUI label mapped to a foaf: property (or "a" for Tipo)
     * @param filter       FILTER lhs; combined with condition/value when both
     *                     filter and value are non-empty
     */
    public String prepareQuery(boolean subjectCheck, boolean objectCheck, String subject, String predicate, String object, String filter, String condition, String value) {
        String p = "";
        if (predicate.equals("Vecino")) {
            p = "foaf:isVecinoOf";
        } else if (predicate.equals("Temperatura")) {
            p = "foaf:hasTemperatura";
        } else if (predicate.equals("Humedad")) {
            p = "foaf:hasHumedad";
        } else if (predicate.equals("Inclinacion")) {
            p = "foaf:hasInclinacion";
        } else if (predicate.equals("Tipo")) {
            p = "a";
        }

        String f = "";
        if (!filter.isEmpty() && !value.isEmpty()) {
            f = " FILTER (" + filter + " " + condition + " " + value + ") ";
        }

        String subjectSelect = subject;
        String objectSelect = object;
        if (!subjectCheck) {
            subjectSelect = "";
        }
        if (!objectCheck) {
            objectSelect = "";
        }

        String queryString = "PREFIX foaf: <" + defaultNameSpace + ">"
            + " \nSELECT " + subjectSelect + " " + objectSelect
            + " \nWHERE { "
            + " \n\t " + subject + " " + p + " " + object
            + f
            + " \n}";

        return queryString;
    }

    /** @return URIs of nodes whose temperature meets the burned threshold. */
    public List<String> burnedNodes() {
        String queryString = "PREFIX foaf: <" + defaultNameSpace + ">"
            + " SELECT ?nodo"
            + " WHERE { "
            + "     ?nodo foaf:hasTemperatura  ?temp FILTER (?temp " + LimitBurned + ")"
            + " }";

        return querySPARQL(queryString, "nodo");
    }

    /** @return URIs of nodes whose inclination exceeds the fallen threshold. */
    public List<String> fallenNodes() {
        String queryString = "PREFIX foaf: <" + defaultNameSpace + ">"
            + " SELECT ?nodo"
            + " WHERE { "
            + "     ?nodo foaf:hasInclinacion  ?tilt FILTER (?tilt " + LimitFallen + ")"
            + " }";

        return querySPARQL(queryString, "nodo");
    }

    /** @return URIs of nodes whose humidity meets the wet threshold. */
    public List<String> wetNodes() {
        String queryString = "PREFIX foaf: <" + defaultNameSpace + ">"
            + " SELECT ?nodo"
            + " WHERE { "
            + "     ?nodo foaf:hasHumedad  ?wet FILTER (?wet " + LimitWet + ")"
            + " }";

        return querySPARQL(queryString, "nodo");
    }

    /** @return true if two neighboring nodes are both over the burned threshold. */
    public boolean isAnyOnFire() {
        String queryString = "PREFIX foaf:<" + defaultNameSpace + ">"
            + " SELECT ?nodo1 ?temp1 ?nodo2 ?temp2"
            + " WHERE {"
            + "     ?nodo1 foaf:hasTemperatura ?temp1 FILTER (?temp1 " + LimitBurned + ") ."
            + "     ?nodo1 foaf:isVecinoOf ?nodo2 ."
            + "     ?nodo2 foaf:hasTemperatura ?temp2 FILTER (?temp2 " + LimitBurned + ")"
            + "}";

        List<String> list = querySPARQL(queryString, "nodo1");
        return !list.isEmpty();
    }

    /** @return true if two neighboring nodes are both over the wet threshold. */
    public boolean isAnyWet() {
        String queryString = "PREFIX foaf:<" + defaultNameSpace + ">"
            + " SELECT ?nodo1 ?wet1 ?nodo2 ?wet2"
            + " WHERE {"
            + "     ?nodo1 foaf:hasHumedad ?wet1 FILTER (?wet1 " + LimitWet + ") ."
            + "     ?nodo1 foaf:isVecinoOf ?nodo2 ."
            + "     ?nodo2 foaf:hasHumedad ?wet2 FILTER (?wet2 " + LimitWet + ")"
            + "}";

        List<String> list = querySPARQL(queryString, "nodo1");
        return !list.isEmpty();
    }

    /** @return true if two neighboring nodes are both over the fallen threshold. */
    public boolean isAnyQuake() {
        String queryString = "PREFIX foaf:<" + defaultNameSpace + ">"
            + " SELECT ?nodo1 ?temp1 ?nodo2 ?temp2"
            + " WHERE {"
            + "     ?nodo1 foaf:hasInclinacion ?temp1 FILTER (?temp1 " + LimitFallen + ") ."
            + "     ?nodo1 foaf:isVecinoOf ?nodo2 ."
            + "     ?nodo2 foaf:hasInclinacion ?temp2 FILTER (?temp2 " + LimitFallen + ")"
            + "}";

        List<String> list = querySPARQL(queryString, "nodo1");
        return !list.isEmpty();
    }

    /**
     * Serializes the model to a file in the given Jena serialization format.
     * Errors are reported to stdout (best-effort, as before).
     */
    public synchronized void saveOntology(String file, String type) {
        FileWriter writer = null;
        try {
            writer = new FileWriter(file);
            model.write(writer, type);
        } catch (Exception e) {
            System.out.println("Error: " + e);
        } finally {
            // FIX: the writer was never closed, risking truncated/buffered output
            if (writer != null) {
                try {
                    writer.close();
                } catch (Exception e) {
                    System.out.println("Error: " + e);
                }
            }
        }
    }

    /** @return the shared ontology model (callers should not mutate it unsynchronized). */
    public synchronized OntModel getModel(){
        return model;
    }
}
