
package org.okkam.refine.model;


import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;

import org.apache.jena.fuseki.http.UpdateRemote;

import org.okkam.refine.controller.ApplicationController;
import org.openrdf.model.BNode;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.sail.memory.MemoryStore;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.sparql.modify.request.QuadDataAcc;
import com.hp.hpl.jena.sparql.modify.request.UpdateDataInsert;
import com.hp.hpl.jena.update.Update;
import com.hp.hpl.jena.update.UpdateRequest;



import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RowVisitor;
import com.google.refine.model.Project;
import com.google.refine.model.Row;
import com.google.refine.rdf.Node;
import com.google.refine.rdf.RdfSchema;
import com.google.refine.rdf.Util;
import com.google.refine.rdf.app.ApplicationContext;
import com.google.refine.rdf.vocab.Vocabulary;
import com.google.refine.rdf.vocab.VocabularyIndexException;

import de.fuberlin.wiwiss.ng4j.NamedGraph;
import de.fuberlin.wiwiss.ng4j.NamedGraphSet;
import de.fuberlin.wiwiss.ng4j.Quad;
import de.fuberlin.wiwiss.ng4j.impl.NamedGraphSetImpl;


//import java.util.logging.Logger;

public class EntityModel  {
   
    // Configuration loaded from /conf/log.conf on the classpath (see constructor).
    Properties prop = new Properties();
    // Log file path read from the "logpath" property; may be null when the
    // configuration file could not be loaded.
    String logPath ;

    // Controller used for ENS create/update calls; re-created per operation.
    // NOTE(review): static but re-assigned from instance methods — confirm intended.
    static ApplicationController controller = null;
    private ApplicationContext applicationContext;
    // Human-readable status of the most recent operation; "0" until one has run.
    String message = "0";

    // private final static Logger logger = Logger.getLogger(RdfUploader.class
    // .getName());

    //private static Logger logger = LoggerFactory.getLogger(EntityModel.class);

    public EntityModel(ApplicationContext ctxt, RDFFormat f) {
        this.applicationContext = ctxt;
        try{
            //prop.load(new FileInputStream(new File(url.getFile()))); 
            InputStream in =  this.getClass().getResourceAsStream("/conf/log.conf");
            prop.load(in);
        }
        catch (Exception e) {
         System.out.println("Configuration file not found");
     }
        logPath = prop.getProperty("logpath");
       // String confPathFromProperty=prop.getProperty("logpath");
      //  File file = new File(this.getClass().getResource(confPathFromProperty).getPath()); 
      //  logPath = file.getAbsolutePath();
    }

    /**
     * Returns the application context this model was created with.
     *
     * @return the {@link ApplicationContext} supplied to the constructor
     */
    public ApplicationContext getApplicationContext() {
        return this.applicationContext;
    }

    /**
     * Upload Entities to ENS. It takes the Entities with their attributes from
     * the Google Refine RDF Repository and converts them to ENS entities,
     * logging each created okkam ID plus start/end timestamps to the
     * configured log file. Individual entity failures are logged and skipped.
     *
     * @param project the current project hosting entities to be created
     * @param engine  the current engine hosting entities to be created
     * @throws IOException if the RDF schema index cannot be created
     */
    public void createEntityToENS(Project project, Engine engine) throws IOException {
        RdfSchema schema;
        try {
            schema = Util.getProjectSchema(applicationContext, project);
        } catch (VocabularyIndexException ve) {
            throw new IOException("Unable to create index for RDF schema", ve);
        }
        Repository model = buildModel(project, engine, schema);

        try {
            RepositoryConnection con = model.getConnection();
            HashMap<Resource, String> distSubjs;
            try {
                distSubjs = getDistinctSubjects(con, "create");
            } finally {
                // release the first connection even if reading the subjects fails
                con.close();
            }

            con = model.getConnection();
            try {
                controller = new ApplicationController(con);

                DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");

                // logs details such as: start time, okkam ids, total entities, end time
                FileLogger logger = new FileLogger(logPath);
                logger.add("======================================================================================");
                logger.add(dateFormat.format(new Date()));
                logger.add("Entities created  for project id = " + project.id);

                Integer countSubjs = 0;
                for (Resource subject : distSubjs.keySet()) {
                    try {
                        String okkamid = controller.createEntity(subject);
                        if (!okkamid.equals("")) {
                            countSubjs++;
                            logger.add(okkamid);
                        }
                    } catch (Exception e) {
                        // Best-effort: one failed entity must not abort the batch.
                        // (Was println(e.getStackTrace()), which only printed the
                        // array reference, not the actual stack trace.)
                        e.printStackTrace();
                    }
                }

                logger.add(dateFormat.format(new Date()));
                logger.add(countSubjs + " Entities Created");
                logger.add("======================================================================================");
                message = countSubjs.toString() + " Entities are Created";
            } finally {
                // the original leaked this connection on any exception
                con.close();
            }
        } catch (RepositoryException ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Export the project's RDF dataset to a file in TriG format. The file is
     * named after the project ("&lt;project name&gt;.trig") and written to the
     * working directory; {@link #getMessage()} reports the absolute path.
     *
     * @param project the current Google Refine project
     * @param engine  the engine providing the filtered rows
     * @param graph   the name of the named graph supplied by the user
     * @throws IOException if the RDF schema index cannot be created or the
     *                     output file cannot be written
     */
    public void uploadAsTRIG(Project project, Engine engine, String graph) throws IOException {
        RdfSchema schema;
        try {
            schema = Util.getProjectSchema(applicationContext, project);
        } catch (VocabularyIndexException ve) {
            throw new IOException("Unable to create index for RDF schema", ve);
        }
        Repository modelRepository = buildModel(project, engine, schema);

        NamedGraphSet graphset = new NamedGraphSetImpl();
        NamedGraph namedGraph = graphset.createGraph(graph);

        try {
            RepositoryConnection con = modelRepository.getConnection();
            try {
                Model tdbModel = ModelFactory.createDefaultModel();
                // carry the schema's namespace prefixes over to the Jena model
                for (Vocabulary v : schema.getPrefixesMap().values()) {
                    tdbModel.setNsPrefix(v.getName(), v.getUri());
                }

                // Convert every Sesame statement into a Jena triple in the named graph.
                RepositoryResult stmtIter = con.getStatements(null, null, null, true);
                while (stmtIter.hasNext()) {
                    org.openrdf.model.Statement statement = (org.openrdf.model.Statement) stmtIter.next();

                    Resource subject = statement.getSubject();
                    com.hp.hpl.jena.rdf.model.Resource subjectJena =
                            tdbModel.createResource(subject.stringValue());
                    Property property = tdbModel.createProperty(statement.getPredicate().toString());

                    String object = statement.getObject().toString();
                    Statement s;
                    // NOTE(review): "#" is a crude literal-vs-URI test; URIs without a
                    // fragment will be stored as literals — confirm this heuristic.
                    if (!object.contains("#")) {
                        object = object.replace("\"", "");
                        s = tdbModel.createStatement(subjectJena, property, object);
                    } else {
                        com.hp.hpl.jena.rdf.model.Resource objectJena = tdbModel.createResource(object);
                        s = tdbModel.createStatement(subjectJena, property, objectJena);
                    }
                    namedGraph.add(s.asTriple());
                }

                // also register the prefixes on the named graph itself
                for (Vocabulary v : schema.getPrefixesMap().values()) {
                    namedGraph.getPrefixMapping().setNsPrefix(v.getName(), v.getUri());
                }

                // (A dead graphset.findQuads(ANY, ANY, ANY, ANY) call whose result
                // was discarded has been removed here.)

                String fileName = project.getMetadata().getName() + ".trig";
                File trigFile = new File(fileName);
                trigFile.createNewFile();
                FileOutputStream fos = new FileOutputStream(trigFile);
                try {
                    Model model = graphset.asJenaModel(graph);
                    model.write(fos, "TRIG");
                } finally {
                    // the original leaked this stream
                    fos.close();
                }
                message = "your data successfully saved: location: " + trigFile.getAbsolutePath();
            } finally {
                // the original never closed this connection
                con.close();
            }
        } catch (RepositoryException ex) {
            message = "No data is saved as TRIG!!!";
            throw new RuntimeException(ex);
        }
    }

    
    
    
    /**
     * Push the project's RDF data into a remote triple store: the repository
     * statements are converted to quads in the user-supplied named graph and
     * sent to the store as a single SPARQL {@code INSERT DATA} request.
     *
     * @param project  the current Google Refine project
     * @param engine   the engine providing the filtered rows
     * @param rdfStore the SPARQL update endpoint URL of the target store
     * @param graph    the name of the named graph to insert into
     * @throws IOException if the RDF schema index cannot be created
     */
    public void createEntityToTDB(Project project, Engine engine, String rdfStore, String graph) throws IOException {
        RdfSchema schema;
        try {
            schema = Util.getProjectSchema(applicationContext, project);
        } catch (VocabularyIndexException ve) {
            throw new IOException("Unable to create index for RDF schema", ve);
        }
        Repository modelRepository = buildModel(project, engine, schema);

        try {
            RepositoryConnection con = modelRepository.getConnection();
            try {
                Model tdbModel = ModelFactory.createDefaultModel();
                // carry the schema's namespace prefixes over to the Jena model
                for (Vocabulary v : schema.getPrefixesMap().values()) {
                    tdbModel.setNsPrefix(v.getName(), v.getUri());
                }

                NamedGraphSet graphset = new NamedGraphSetImpl();
                NamedGraph namedGraph = graphset.createGraph(graph);
                try {
                    // Convert every Sesame statement into a Jena triple in the named graph.
                    RepositoryResult stmtIter = con.getStatements(null, null, null, true);
                    while (stmtIter.hasNext()) {
                        org.openrdf.model.Statement statement = (org.openrdf.model.Statement) stmtIter.next();

                        Resource subject = statement.getSubject();
                        com.hp.hpl.jena.rdf.model.Resource subjectJena =
                                tdbModel.createResource(subject.stringValue());
                        Property property = tdbModel.createProperty(statement.getPredicate().toString());

                        String object = statement.getObject().toString();
                        Statement s;
                        // NOTE(review): "#" is a crude literal-vs-URI test; URIs without a
                        // fragment will be stored as literals — confirm this heuristic.
                        if (!object.contains("#")) {
                            object = object.replace("\"", "");
                            s = tdbModel.createStatement(subjectJena, property, object);
                        } else {
                            com.hp.hpl.jena.rdf.model.Resource objectJena = tdbModel.createResource(object);
                            s = tdbModel.createStatement(subjectJena, property, objectJena);
                        }
                        namedGraph.add(s.asTriple());
                    }

                    // Collect every quad of the graph set into a SPARQL update payload.
                    QuadDataAcc data = new QuadDataAcc();
                    Iterator iter = graphset.findQuads(com.hp.hpl.jena.graph.Node.ANY,
                            com.hp.hpl.jena.graph.Node.ANY,
                            com.hp.hpl.jena.graph.Node.ANY,
                            com.hp.hpl.jena.graph.Node.ANY);
                    while (iter.hasNext()) {
                        Quad next = (Quad) iter.next();
                        data.addQuad(new com.hp.hpl.jena.sparql.core.Quad(
                                next.getGraphName(), next.getTriple()));
                    }

                    // Build the INSERT DATA request and attach the schema prefixes.
                    Update updateData = new UpdateDataInsert(data);
                    UpdateRequest req = new UpdateRequest();
                    for (Vocabulary v : schema.getPrefixesMap().values()) {
                        req.setPrefix(v.getName(), v.getUri());
                    }
                    req.add(updateData);

                    // Execute the SPARQL update against the remote endpoint.
                    UpdateRemote update = new UpdateRemote();
                    try {
                        update.execute(req, rdfStore);
                        message = "Successfully Uploaded to TDB";
                    } catch (Exception e) {
                        // Best-effort: surface failure via the status message, but keep
                        // the cause in the log (the original swallowed it silently).
                        e.printStackTrace();
                        message = "Data Upload Fails";
                    }
                } finally {
                    // the original only closed the graph set on success
                    graphset.close();
                }
            } finally {
                // the original only closed this connection on success
                con.close();
            }
        } catch (RepositoryException ex) {
            throw new RuntimeException(ex);
        }
    }
    
    
    
    
    /**
     * Update Entities. Once cells are modified in Refine, the change is pushed
     * to the ENS based on the okkam ID found in the RDF data. The RDF prepared
     * in Refine MUST contain the okkam ID (an object value starting with
     * "http://www.okkam.org/ens"), otherwise updating will FAIL and
     * {@link #getMessage()} reports that no IDs were found.
     *
     * @param project the current Google Refine project
     * @param engine  the engine providing the filtered rows
     * @throws IOException if the RDF schema index cannot be created
     */
    public void updateENSEntity(Project project, Engine engine)
            throws IOException {
        RdfSchema schema;
        try {
            schema = Util.getProjectSchema(applicationContext, project);
        } catch (VocabularyIndexException ve) {
            throw new IOException("Unable to create index for RDF schema", ve);
        }
        Repository model = buildModel(project, engine, schema);

        try {
            RepositoryConnection con = model.getConnection();
            HashMap<Resource, String> distSubjs;
            try {
                distSubjs = getDistinctSubjects(con, "update");
            } finally {
                // release the first connection even if reading the subjects fails
                con.close();
            }

            con = model.getConnection();
            try {
                // (Dead "new SimpleDateFormat(...)" / "new Date()" statements whose
                // results were discarded have been removed here.)
                System.out.println("Updating Starts");
                Integer countSubjs = 0;

                if (!distSubjs.isEmpty()) {
                    controller = new ApplicationController(con);
                    for (Resource distSubj : distSubjs.keySet()) {
                        String okkamid = distSubjs.get(distSubj);
                        try {
                            controller.updateEntity(distSubj, okkamid);
                            countSubjs++;
                        } catch (Exception e) {
                            // best-effort: one failed update must not abort the batch
                            e.printStackTrace();
                            System.out.println("Some Update fails");
                        }
                    }
                    message = countSubjs.toString() + " Entities are updated";
                    System.out.println(countSubjs + " Entities are updated");
                } else {
                    message = "The RDf data do not contain okkamID of entities to be updated";
                }
            } finally {
                // the original leaked this connection on the empty/exception paths
                con.close();
            }
        } catch (RepositoryException ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Returns the human-readable status message produced by the most recent
     * operation ("0" until any operation has run).
     *
     * @return the current status message
     */
    public String getMessage() {
        return this.message;
    }

    /**
     * Builds an in-memory Sesame repository containing the project's RDF by
     * walking every filtered row and materializing each schema root node.
     *
     * @param project the current Google Refine project
     * @param engine  the engine providing the filtered rows
     * @param schema  the RDF schema describing the mapping
     * @return the populated repository
     * @throws IOException declared for API compatibility with callers
     */
    public Repository buildModel(final Project project, Engine engine, RdfSchema schema)
            throws IOException {
        RdfRowVisitor visitor = new RdfRowVisitor(schema) {

            @Override
            public boolean visit(Project project, int rowIndex, Row row) {
                // Emit RDF for every schema root on this row; false = keep visiting.
                for (Node root : roots) {
                    root.createNode(baseUri, factory, con, project, row, rowIndex, blanks);
                }
                return false;
            }
        };
        return buildModel(project, engine, visitor);
    }

    /**
     * Runs the given visitor over all of the engine's filtered rows and returns
     * the repository the visitor populated.
     *
     * @param project the current Google Refine project
     * @param engine  the engine providing the filtered rows
     * @param visitor the row visitor that emits RDF into its repository
     * @return the visitor's populated repository
     */
    public static Repository buildModel(Project project, Engine engine, RdfRowVisitor visitor) {
        FilteredRows rows = engine.getAllFilteredRows();
        rows.accept(project, visitor);
        return visitor.getModel();
    }

    /**
     * Indicates whether this model takes a writer; currently always {@code true}.
     * NOTE(review): no caller is visible in this file — confirm this flag is used.
     */
    public boolean takeWriter() {
        return true;
    }

    /**
     * Collects the distinct subjects of the repository, which later drive
     * entity creation or update.
     *
     * @param model an open connection to the repository holding the project RDF
     * @param mode  {@code "create"} to collect every subject (mapped to an empty
     *              ID), or {@code "update"} to collect only subjects one of whose
     *              statement objects is an okkam ID (a value starting with
     *              {@code "http://www.okkam.org/ens"})
     * @return map from subject resource to its okkam ID ({@code ""} in create mode)
     * @throws RepositoryException if the statements cannot be read
     */
    public HashMap<org.openrdf.model.Resource, String> getDistinctSubjects(RepositoryConnection model, String mode)
            throws RepositoryException {
        HashMap<org.openrdf.model.Resource, String> subjects = new HashMap<org.openrdf.model.Resource, String>();
        RepositoryResult stmtIter = model.getStatements(null, null, null, true);
        while (stmtIter.hasNext()) {
            org.openrdf.model.Statement statement = (org.openrdf.model.Statement) stmtIter.next();

            // (A dead "statment.getPredicate()" call whose result was discarded
            // has been removed; the redundant casts of the subject are gone too.)
            Resource subject = statement.getSubject();
            String okidval = statement.getObject().stringValue();

            if (mode.equals("update") && okidval.startsWith("http://www.okkam.org/ens")) {
                // only a statement whose object is an okkam ID qualifies for update
                subjects.put(subject, okidval);
            } else if (mode.equals("create")) {
                // for creation there is no okkam ID yet
                subjects.put(subject, "");
            }
        }
        return subjects;
    }

    /**
     * Base row visitor that owns an in-memory Sesame repository and exposes the
     * schema's roots, base URI and pre-created blank nodes to subclasses.
     *
     * Lifecycle: the constructor initializes the repository and pre-creates the
     * blank nodes; {@link #start(Project)} opens the connection used during the
     * visit; {@link #end(Project)} closes it.
     * NOTE(review): calling {@code end} without a prior {@code start} would hit a
     * null {@code con} — confirm the engine always pairs start/end.
     */
    public static abstract class RdfRowVisitor implements RowVisitor {

        protected Repository model;
        protected URI baseUri;
        protected BNode[] blanks;
        protected List<Node> roots;
        private RdfSchema schema;

        // Set in start() and released in end(); used by subclasses during visit().
        protected ValueFactory factory;
        protected RepositoryConnection con;

        /** Returns the repository populated by this visitor. */
        public Repository getModel() {
            return model;
        }

        public RdfRowVisitor(RdfSchema schema) {
            this.schema = schema;
            baseUri = schema.getBaseUri();
            roots = schema.getRoots();

            // initializing the in-memory repository
            model = new SailRepository(new MemoryStore());
            try {
                model.initialize();
                // These locals intentionally shadow the fields: this short-lived
                // connection exists only to mint the blank nodes up front; the
                // fields themselves are assigned later in start().
                RepositoryConnection con = model.getConnection();
                try {
                    ValueFactory factory = con.getValueFactory();
                    blanks = new BNode[schema.get_blanks().size()];
                    for (int i = 0; i < blanks.length; i++) {
                        blanks[i] = factory.createBNode();
                    }
                } finally {
                    con.close();
                }
            } catch (RepositoryException ex) {
                throw new RuntimeException(ex);
            }
        }

        /** Closes the visiting connection opened by {@link #start(Project)}. */
        public void end(Project project) {
            try {
                if (con.isOpen()) {
                    con.close();
                }
            } catch (RepositoryException e) {
                throw new RuntimeException("", e);
            }
        }

        /** Opens the connection and value factory used while visiting rows. */
        public void start(Project project) {
            try {
                con = model.getConnection();
                factory = con.getValueFactory();
            } catch (RepositoryException ex) {
                throw new RuntimeException("", ex);
            }
        }

        /**
         * Visits one row; implementations emit RDF into {@link #con}.
         * Return {@code true} to stop the visit early.
         */
        abstract public boolean visit(Project project, int rowIndex, Row row);

        /** Returns the schema this visitor was built from. */
        public RdfSchema getRdfSchema() {
            return schema;
        }
        
                
    }

}
