/**
 * $Id: ConnectDao.java 8 2011-10-02 09:35:56Z adriftrock@gmail.com $
 */

package com.swehr.server.daos.impl;

import com.hp.hpl.jena.query.*;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.update.*;
import com.swehr.utils.ConnectionUtil;
import org.apache.log4j.Logger;

import java.io.ByteArrayOutputStream;
import java.sql.*;

/**
 * Data-access object for a Jena SDB triple store backed by an embedded H2
 * database: opens and closes the store and executes SPARQL SELECT, ASK and
 * UPDATE commands against it.
 *
 * <p>NOTE(review): the current {@link QueryExecution} is kept in an instance
 * field, so a single instance is not safe for concurrent use — confirm
 * callers do not share instances across threads.
 *
 * @author Rock Huang
 * @version 0.1
 */
public class ConnectDao {
    private static final Logger logger = Logger.getLogger(ConnectDao.class);

    /** Shared store description: triple-nodes-index layout on an H2 database. */
    private static final StoreDesc STORE_DESC =
            new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.H2);

    private Store store = null;
    private Dataset ds = null;
    private QueryExecution qe = null;

    /**
     * Opens the underlying store immediately. Failures are logged rather than
     * propagated (preserving the original contract), so the dataset may be
     * null afterwards if opening failed.
     */
    public ConnectDao() {
        try {
            open();
        } catch (SQLException e) {
            logger.error("Failed to open the triple store.", e);
        }
    }

    /**
     * Appends the default namespace PREFIX declarations (XSD, RDF, RDFS, OWL,
     * EHR) to the given SPARQL buffer, one per line.
     *
     * @param sparql buffer the declarations are appended to.
     */
    public void setDefaultNS(StringBuffer sparql) {
        sparql.append(Const.XSD).append("\n");
        sparql.append(Const.RDF).append("\n");
        sparql.append(Const.RDFS).append("\n");
        sparql.append(Const.OWL).append("\n");
        sparql.append(Const.EHR).append("\n");
    }

    /**
     * Appends the linked-data namespace PREFIX declarations (DBpedia ontology
     * and property, schema.org, FOAF) to the given SPARQL buffer, one per line.
     *
     * @param sparql buffer the declarations are appended to.
     */
    public void setLinkedNS(StringBuffer sparql) {
        sparql.append(Const.DBPO).append("\n");
        sparql.append(Const.DBPP).append("\n");
        sparql.append(Const.SCHM).append("\n");
        sparql.append(Const.FOAF).append("\n");
    }

    /**
     * Runs a SELECT query and serializes its result set as JSON.
     *
     * @param sparql a SPARQL SELECT command.
     * @return the results in the SPARQL-results JSON format.
     * @throws Exception if the query fails.
     */
    public String toJson(String sparql) throws Exception {
        try {
            return toJson(executeQuery(sparql));
        } finally {
            // The formatter above fully consumes the result set, so the
            // execution can be released here; previously it leaked on this
            // path because only executeAsk()/close() ever closed it.
            if (qe != null) {
                qe.close();
            }
        }
    }

    /**
     * Serializes a result set as JSON.
     *
     * @param rs a SPARQL result set; it is fully consumed by this call.
     * @return the results in the SPARQL-results JSON format.
     */
    public String toJson(ResultSet rs) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ResultSetFormatter.outputAsJSON(baos, rs);
        return baos.toString();
    }

    /**
     * Executes a SELECT query against the current dataset. The backing
     * {@link QueryExecution} stays open so the returned result set can be
     * iterated; call {@link #close()} when done.
     *
     * @param sparql A SPARQL command string.
     * @return ResultSet
     * @throws Exception An Exception might throw.
     */
    public ResultSet executeQuery(String sparql) throws Exception {
        logger.debug("executeQuery(), the SPARQL has been executed.\n" + sparql);
        qe = QueryExecutionFactory.create(sparql, ds);
        return qe.execSelect();
    }

    /**
     * Executes a SPARQL UPDATE against the current dataset.
     *
     * @param sparql A SPARQL update command.
     * @param inf    optional inference DAO; when non-null its model is closed
     *               and rebuilt so inferred triples reflect the update —
     *               TODO confirm begin() rebuilds rather than merely reopens.
     * @throws Exception An exception might throw.
     */
    public void executeUpdate(String sparql, InferenceDao inf) throws Exception {
        logger.debug("executeUpdate(), the SPARQL has been executed.\n" + sparql);
        GraphStore graphStore = GraphStoreFactory.create(ds);
        UpdateRequest ur = UpdateFactory.create(sparql);
        UpdateAction.execute(ur, graphStore);
        if (inf != null) {
            inf.getHrModel().close();
            inf.begin();
        }
    }

    /**
     * Executes a SPARQL UPDATE without refreshing any inference model.
     *
     * @param sparql A SPARQL command.
     * @throws Exception An exception might throw.
     */
    public void executeUpdate(String sparql) throws Exception {
        executeUpdate(sparql, null);
    }

    /**
     * Executes an ASK query and releases all store resources afterwards.
     *
     * @param sparql a SPARQL ASK command.
     * @return the boolean answer of the ASK query.
     */
    public boolean executeAsk(String sparql) {
        logger.debug("executeAsk(), the SPARQL has been executed. \n" + sparql);
        qe = QueryExecutionFactory.create(sparql, ds);
        boolean rs = qe.execAsk();
        // Unlike executeQuery(), the answer is already materialized, so the
        // whole connection can be torn down immediately.
        close();
        return rs;
    }

    /**
     * Closes the query execution, dataset and store (including the underlying
     * JDBC connection). Safe to call repeatedly; null members are skipped.
     */
    public void close() {
        if (qe != null) {
            qe.close();
        }
        if (ds != null) {
            ds.close();
        }
        if (store != null) {
            store.getConnection().close();
            store.close();
        }
    }

    /**
     * (Re)opens the store and its dataset if not already open.
     *
     * @throws SQLException if a JDBC connection cannot be obtained.
     */
    public void open() throws SQLException {
        if (store == null || store.isClosed()) {
            store = getStore();
            ds = SDBFactory.connectDataset(store);
        }
    }

    /**
     * Convenience wrapper: opens the store, runs an update, and always closes
     * the store again.
     *
     * @param s   a SPARQL update command.
     * @param inf optional inference DAO to refresh (may be null).
     * @return true on success, false if any step failed (failure is logged).
     */
    public boolean execUpdate(String s, InferenceDao inf) {
        try {
            open();
            executeUpdate(s, inf);
            return true;
        } catch (Exception e) {
            logger.error("execute failed.", e);
            return false;
        } finally {
            close();
        }
    }

    /** @return the current dataset, or null if the store is not open. */
    public Dataset getDs() {
        return ds;
    }

    /**
     * Builds an SDB store over a fresh JDBC connection from the pool.
     *
     * @return a connected SDB store using the shared H2 store description.
     * @throws SQLException if no JDBC connection can be obtained.
     */
    protected Store getStore() throws SQLException {
        Connection jdbcConn = ConnectionUtil.getInstance().getConnection();
        logger.debug("### Get DB connection: [" + jdbcConn.toString() + "] ###");
        SDBConnection conn = SDBFactory.createConnection(jdbcConn);
        return SDBFactory.connectStore(conn, STORE_DESC);
    }


    // One-time schema bootstrap: format the SDB tables unless they already
    // exist (probed via H2's INFORMATION_SCHEMA, which stores unquoted
    // identifiers in upper case).
    static {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = ConnectionUtil.getInstance().getConnection();
            stmt = conn.createStatement();
            // to test if there is an existing table.
            java.sql.ResultSet rs = stmt.executeQuery(
                    "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " +
                            "WHERE TABLE_NAME='NODES'");
            // next() is the portable probe here: first() throws SQLException
            // on the TYPE_FORWARD_ONLY result sets a default Statement yields.
            if (!rs.next()) {
                Store s = SDBFactory.connectStore(SDBFactory.createConnection(conn),
                        STORE_DESC);
                s.getTableFormatter().create();
                logger.debug("Triple Store: Tables Formatted.");
            } else {
                logger.debug("Triple Store: Tables Existed.");
            }
            rs.close();
        } catch (SQLException e) {
            logger.error("An error occurred while initializing triple store", e);
        } finally {
            // Close the statement and connection explicitly; previously the
            // statement (and its result set) leaked.
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (SQLException e) {
                    logger.warn("Failed to close bootstrap statement.", e);
                }
            }
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e) {
                    logger.warn("Failed to close bootstrap connection.", e);
                }
            }
        }
    }
}