/*
 *  Copyright 2004 Blandware (http://www.blandware.com)
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.blandware.atleap.persistence.hibernate.core;

import com.blandware.atleap.common.util.PartialCollection;
import com.blandware.atleap.common.util.QueryInfo;
import com.blandware.atleap.model.core.MenuItem;
import com.blandware.atleap.persistence.core.BackupDAO;
import com.blandware.atleap.persistence.core.MenuDAO;
import com.blandware.atleap.persistence.exception.BackupFileAccessException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dbunit.database.*;
import org.dbunit.dataset.CachedDataSet;
import org.dbunit.dataset.FilteredDataSet;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.datatype.IDataTypeFactory;
import org.dbunit.dataset.filter.ITableFilter;
import org.dbunit.dataset.stream.IDataSetProducer;
import org.dbunit.dataset.xml.XmlDataSet;
import org.dbunit.dataset.xml.XmlProducer;
import org.dbunit.ext.mssql.InsertIdentityOperation;
import org.dbunit.operation.DatabaseOperation;
import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Dialect;
import org.hibernate.util.StringHelper;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
import org.springframework.orm.hibernate3.LocalSessionFactoryBean;
import org.xml.sax.InputSource;

import java.io.*;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

/**
 * <p>DAO for performing backup/restore database operations</p>
 * <p/>
 * <p><a href="BackupDAOHibernate.java.html"><i>View Source</i></a></p>
 *
 * @author Andrey Grebnev <a href="mailto:andrey.grebnev@blandware.com">&lt;andrey.grebnev@blandware.com&gt;</a>
 * @version $Revision: 1.9 $ $Date: 2008/07/08 12:04:28 $
 */
public class BackupDAOHibernate extends BaseDAOHibernate implements BackupDAO {

    /**
     * Classpath resource containing initial data, loaded when no archive date is given
     */
    private static final String INITIAL_DATA_FILE = "initial-data.xml";

    /**
     * Pattern of archive file names, e.g. <code>20080708-120428.zip</code>.
     * Kept as a pattern string rather than a shared {@link SimpleDateFormat}
     * instance because SimpleDateFormat is not thread-safe and this DAO may be
     * called concurrently; see {@link #archiveFormatter()}.
     */
    private static final String ARCHIVE_FILE_NAME_PATTERN = "yyyyMMdd-HHmmss'.zip'";

    /**
     * Name of the single entry inside each backup zip archive
     */
    private static final String ARCHIVE_ENTRY_NAME = "db-export.xml";

    /**
     * Table whose existence is used to decide whether the schema has already been created
     */
    private static final String TEST_TABLE_NAME = "al_core_localizable";
    private static final boolean TEST_TABLE_NAME_QUOTED = true;

    protected transient final Log log = LogFactory.getLog(getClass());

    /**
     * Creates a fresh formatter for archive file names. A new instance is
     * returned on every call because {@link SimpleDateFormat} is not
     * thread-safe, so a single static shared instance would be unsafe here.
     *
     * @return a new formatter for the archive file name pattern
     */
    private static SimpleDateFormat archiveFormatter() {
        return new SimpleDateFormat(ARCHIVE_FILE_NAME_PATTERN);
    }


    protected String escapePattern = "\"?\"";

    /**
     * Set escape pattern for DBMS.
     *
     * @param escapePattern e.g. <code>"?"</code> or <code>`?`</code> or <code>[?]</code>
     */
    public void setEscapePattern(String escapePattern) {
        this.escapePattern = escapePattern;
    }

    protected String databaseType = "mysql";

    /**
     * Setup database type. If <code>sqlserver</code> we will use MSSQL_CLEAN_INSERT and CLEAN_INSERT in all other cases.
     *
     * @param databaseType  type to set
     */
    public void setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
    }

    protected String schema = null;

    /**
     * Set schema for database tables. A blank (whitespace-only) schema is
     * normalized to <code>null</code> so that JDBC metadata lookups match any schema.
     *
     * @param schema    schema to set
     */
    public void setSchema(String schema) {
        if (schema != null && schema.trim().length() == 0)
            this.schema = null;
        else
            this.schema = schema;
    }

    protected String backupDirPath = ".";

    /**
     * Setup path to backup directory.
     *
     * @param backupDirPath     path to set
     */
    public void setBackupDirPath(String backupDirPath) {
        this.backupDirPath = backupDirPath;
    }

    protected String dataTypeFactoryClass = "org.dbunit.ext.mysql.MySqlDataTypeFactory";

    /**
     * Setup datatype factory for DBUnit.
     *
     * @param dataTypeFactoryClass  name of class name of factory
     */
    public void setDataTypeFactoryClass(String dataTypeFactoryClass) {
        this.dataTypeFactoryClass = dataTypeFactoryClass;
    }

    protected LocalSessionFactoryBean localSessionFactoryBean = null;

    /**
     * Setup local session factory bean. Used to obtain the Hibernate
     * {@link Configuration} for schema-creation scripts during restore.
     *
     * @param localSessionFactoryBean   bean
     */
    public void setLocalSessionFactoryBean(LocalSessionFactoryBean localSessionFactoryBean) {
        this.localSessionFactoryBean = localSessionFactoryBean;
    }

    /**
     * DAO to work with menu items
     */
    protected MenuDAO menuDAO = null;

    /**
     * Sets menu DAO.
     *
     * @param menuDAO   DAO to set
     */
    public void setMenuDAO(MenuDAO menuDAO) {
        this.menuDAO = menuDAO;
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#backup()
     */
    public void backup() throws BackupFileAccessException {
        HibernateTemplate hibernateTemplate = new HibernateTemplate(getSessionFactory());
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
                new HibernateCallback() {
                    public Object doInHibernate(Session session) throws HibernateException, SQLException {
                        Connection con = session.connection();

                        // DBUnit export requires auto-commit; restore the old setting afterwards
                        boolean oldAutoCommit = con.getAutoCommit();
                        if (!oldAutoCommit) {
                            con.setAutoCommit(true);
                        }

                        OutputStream out = null;
                        try {
                            IDatabaseConnection connection = getConnection(con);

                            // Setup the ResultSet table factory
                            IResultSetTableFactory factory = new CachedResultSetTableFactory();
                            DatabaseConfig config = connection.getConfig();
                            config.setFeature(DatabaseConfig.FEATURE_SKIP_ORACLE_RECYCLEBIN_TABLES, true);
                            config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY, factory);
                            IDataSet dataset = connection.createDataSet();

                            // Use topologically sorted database
                            ITableFilter filter = new DatabaseSequenceFilter(connection);
                            dataset = new FilteredDataSet(filter, dataset);

                            out = openOutput();
                            XmlDataSet.write(dataset, out);

                            if (log.isInfoEnabled()) {
                                log.info("Backuping database into file dated " + new Date() + " finished.");
                            }

                        } catch (Exception ex) {
                            String message = "Cannot backup database into dir " + backupDirPath;
                            if (log.isWarnEnabled()) {
                                log.warn(message, ex);
                            }
                            throw new HibernateException(message, ex);
                        } finally {
                            // out stays null if an exception occurred before openOutput()
                            if (out != null) {
                                try {
                                    out.close();
                                } catch (Exception e) {
                                    // swallow: a close failure must not mask the original error
                                }
                            }

                            if (!oldAutoCommit) {
                                con.setAutoCommit(false);
                            }
                        }

                        return null;
                    }
                }
        );

    }

    /**
     * Execute the given schema script on the given JDBC Connection.
     * Will log unsuccessful statements and continue to execute.
     *
     * @param con the JDBC Connection to execute the script on
     * @param sql the SQL statements to execute
     * @throws SQLException if thrown by JDBC methods
     */
    protected void executeSQLScript(Connection con, String []sql) throws SQLException {
        if (sql != null && sql.length > 0) {
            Statement stmt = con.createStatement();
            try {
                for (int i = 0; i < sql.length; i++) {
                    if (log.isDebugEnabled()) {
                        log.debug("Executing schema statement: " + sql[i]);
                    }
                    try {
                        stmt.executeUpdate(sql[i]);
                    }
                    catch (SQLException ex) {
                        // best-effort: log and continue with the remaining statements
                        if (log.isWarnEnabled()) {
                            log.warn("Unsuccessful schema statement: " + sql[i], ex);
                        }
                    }
                }
            }
            finally {
                if (stmt != null) {
                    try {
                        stmt.close();
                    }
                    catch (SQLException ex) {
                        if (log.isWarnEnabled()) {
                            log.warn("Could not close JDBC Statement", ex);
                        }
                    }
                    catch (RuntimeException ex) {
                        if (log.isErrorEnabled()) {
                            log.error("Unexpected exception on closing JDBC Statement", ex);
                        }
                    }
                }
            }
        }
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#restore(java.util.Date, java.lang.Boolean)
     */
    public void restore(final Date date, final Boolean force) throws BackupFileAccessException {
        restore(date, null, force);
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#restore(java.io.InputStream, Boolean)
     */
    public void restore(final InputStream inputStream, final Boolean force) {
        try {
            restore(null, inputStream, force);
        } catch (BackupFileAccessException e) {
            // this actually cannot happen (no file access when a stream is supplied),
            // but still log it with the full stack trace just in case
            if (log.isErrorEnabled()) {
                log.error("Cannot restore", e);
            }
        }
    }

    /**
     * Does restore job. If inputStream is not null, it's used, otherwise date
     * is considered.
     *
     * @param date date of backup to restore. If inputStream is not null, date
     * is ignored.
     * @param inputStream if not null, it's used to supply backup data
     * @param force if it is <code>false</code> we will create tables and load data only if tables do not exist. If it is <code>true</code>
     * we will drop and create tables and load data.
     * @throws BackupFileAccessException thrown if directory/file access is prohibited
     */
    protected void restore(final Date date, final InputStream inputStream, final Boolean force) throws BackupFileAccessException {

        HibernateTemplate hibernateTemplate = new HibernateTemplate(getSessionFactory());
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
                new HibernateCallback() {
                    public Object doInHibernate(Session session) throws HibernateException, SQLException {
                        Connection con = session.connection();

                        // DBUnit operations require auto-commit; restore the old setting afterwards
                        boolean oldAutoCommit = con.getAutoCommit();
                        if (!oldAutoCommit) {
                            con.setAutoCommit(true);
                        }
                        InputStream is = null;
                        try {
                            is = openInput(date, inputStream);

                            Configuration configuration = localSessionFactoryBean.getConfiguration();
                            Dialect dialect = Dialect.getDialect(configuration.getProperties());

                            boolean tablesExist = isTables(con);

                            String[] sql = null;

                            if (!tablesExist) {
                                //create tables
                                sql = configuration.generateSchemaCreationScript(dialect);
                                executeSQLScript(con, sql);

                                if (log.isInfoEnabled()) {
                                    log.info("Database tables created.");
                                }
                            } else {
                                if (Boolean.TRUE.equals(force)) {
                                    //delete menu items as they have self reference and some databases e.g. MySQL, HSQLDB
                                    //cannot delete them
                                    session.setFlushMode(FlushMode.AUTO);

                                    Collection menuItems = menuDAO.listMenuItems(null);
                                    for (Iterator iterator = menuItems.iterator(); iterator.hasNext();) {
                                        MenuItem menuItem = (MenuItem) iterator.next();
                                        menuDAO.deleteMenuItem(menuItem);
                                    }
                                    session.flush();
                                    session.setFlushMode(FlushMode.MANUAL);
                                    session.clear();
                                }
                            }

                            if (!tablesExist || Boolean.TRUE.equals(force)) {
                                //load data
                                IDatabaseConnection connection = getConnection(con);

                                if (is == null) {
                                    throw new BackupFileAccessException("Cannot read file");
                                }

                                IDataSetProducer producer = new XmlProducer(new InputSource(is));
                                IDataSet dataset = new CachedDataSet(producer);

                                // MS SQL Server needs identity-insert handling; plain CLEAN_INSERT elsewhere
                                DatabaseOperation operation = DatabaseOperation.CLEAN_INSERT;
                                if ("sqlserver".equalsIgnoreCase(databaseType)) {
                                    operation = InsertIdentityOperation.CLEAN_INSERT;
                                }

                                operation.execute(connection, dataset);

                                if (log.isInfoEnabled()) {
                                    if (date == null)
                                        log.info("Initial data from file " + INITIAL_DATA_FILE + " loaded.");
                                    else
                                        log.info("Initial data from file dated " + date + " loaded.");
                                }
                            }


                        } catch (Exception ex) {
                            String message = "Cannot restore database with date " + date;
                            if (log.isWarnEnabled()) {
                                log.warn(message, ex);
                            }
                            throw new HibernateException(message, ex);
                        } finally {
                            try {
                                if (is != null) {
                                    is.close();
                                }
                            } catch (Exception e) {
                                // swallow: a close failure must not mask the original error
                            }

                            if (!oldAutoCommit) {
                                con.setAutoCommit(false);
                            }
                        }

                        // clearing second-level cache so stale entities do not survive the restore
                        if (getSessionFactory() instanceof SessionFactoryImplementor) {
                            SessionFactoryImplementor factoryImplementor = (SessionFactoryImplementor) getSessionFactory();
                            Map cacheRegionsMap = factoryImplementor.getAllSecondLevelCacheRegions();
                            Collection cacheRegions = cacheRegionsMap.values();
                            Iterator i = cacheRegions.iterator();
                            while (i.hasNext()) {
                                org.hibernate.cache.Cache cache = (org.hibernate.cache.Cache) i.next();
                                cache.clear();
                            }
                            if (log.isDebugEnabled()) {
                                log.debug("Second-level cache cleared");
                            }
                        } else {
                            if (log.isWarnEnabled()) {
                                log.warn("Could not clear second-level cache");
                            }
                        }

                        return null;
                    }
                }
        );

    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#canListArchives()
     */
    public boolean canListArchives() {
        File dir = new File(backupDirPath);
        if (!dir.canRead()) {
            return false;
        }
        String files[] = dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".zip");
            }
        }
        );

        // list() returns null on I/O error or if the path is not a directory
        if (files == null) {
            return false;
        }

        return true;
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#listArchives(com.blandware.atleap.common.util.QueryInfo)
     */
    public PartialCollection listArchives(QueryInfo queryInfo) throws BackupFileAccessException {
        File dir = new File(backupDirPath);
        createDirIfNotExists();
        String accessErrorMessage = "Cannot read " + backupDirPath + " directory";
        if (!dir.canRead()) {
            throw new BackupFileAccessException(accessErrorMessage);
        }
        String files[] = dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".zip");
            }
        }
        );

        if (files == null) {
            throw new BackupFileAccessException(accessErrorMessage);
        }

        // collect dates of files whose names match the archive pattern
        SimpleDateFormat formatter = archiveFormatter();
        List list = new ArrayList(files.length + 1);
        for (int i = 0; i < files.length; i++) {
            String fileName = files[i];
            Date date = null;
            try {
                date = formatter.parse(fileName);
            } catch (Exception ex) {
                // swallow: files with foreign names are simply skipped
            }
            if (date != null)
                list.add(date);
        }
        Collections.sort(list);

        // clamp both indices so an out-of-range offset cannot make
        // subList() throw IndexOutOfBoundsException
        int size = list.size();
        int fromIndex = Math.min(queryInfo.getOffset().intValue(), size);
        int toIndex = Math.min(fromIndex + queryInfo.getLimit().intValue(), size);

        return new PartialCollection(list.subList(fromIndex, toIndex), size);
    }

    /**
     * Get DBUnit configured connection.
     *
     * @param con SQL connection
     * @return DBUnit connection
     * @throws Exception if e.g. datatype factory class not found
     */
    protected IDatabaseConnection getConnection(Connection con) throws Exception {
        IDatabaseConnection connection = new DatabaseConnection(con, schema);
        DatabaseConfig config = connection.getConfig();
        config.setFeature(DatabaseConfig.FEATURE_BATCHED_STATEMENTS, false);
        config.setFeature(DatabaseConfig.FEATURE_QUALIFIED_TABLE_NAMES, false);
        config.setFeature(DatabaseConfig.FEATURE_DATATYPE_WARNING, true);
        config.setProperty(DatabaseConfig.PROPERTY_ESCAPE_PATTERN, escapePattern);
        config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY,
                new ForwardOnlyResultSetTableFactory());
        IDataTypeFactory dataTypeFactory = (IDataTypeFactory) Class.forName(dataTypeFactoryClass).newInstance();
        config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, dataTypeFactory);

        return connection;
    }

    /**
     * Opens input stream for reading data from archive.
     *
     * @param archiveDate if date is <code>null</code> read initial data as resource.
     * If inputStream is not <code>null</code> this parameter is ignored.
     * @param inputStream input stream which supplies zipped data of backup
     * archive. If <code>null</code>, archiveDate is used.
     * @return InputStream positioned at the archive entry, or <code>null</code>
     * if the entry (or the initial-data resource) cannot be found. Do not forget to close it.
     * @throws Exception if something wrong
     */
    protected InputStream openInput(Date archiveDate, InputStream inputStream) throws Exception {
        if (archiveDate != null || inputStream != null) {
            File file = null;
            InputStream is;
            if (inputStream == null) {
                file = new File(backupDirPath, archiveFormatter().format(archiveDate));
                if (!file.canRead()) {
                    throw new BackupFileAccessException("Cannot read from " + file.getPath() + " file");
                }
                is = new FileInputStream(file);
            } else {
                is = inputStream;
            }
            // scan the zip for the expected entry and return the stream positioned at it
            ZipInputStream zis = new ZipInputStream(new BufferedInputStream(is));
            ZipEntry entry = null;
            while((entry = zis.getNextEntry()) != null) {
                if (entry.getName().equalsIgnoreCase(ARCHIVE_ENTRY_NAME)) {
                    return zis;
                }
            }
            if (log.isErrorEnabled()) {
                log.error("Cannot find " + ARCHIVE_ENTRY_NAME + " inside archive");
            }
            return null;

        } else {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            if (classLoader == null) {
                classLoader = getClass().getClassLoader();
            }

            InputStream is = classLoader.getResourceAsStream(INITIAL_DATA_FILE);
            if (is == null) {
                if (log.isErrorEnabled()) {
                    log.error("Cannot load " + INITIAL_DATA_FILE);
                }
            }
            return is;
        }
    }

    /**
     * Open output stream for writing data into archive. The returned stream is
     * a zip stream already positioned at the single archive entry.
     *
     * @return output stream. Do not forget to close it.
     * @throws Exception  if something wrong
     */
    protected OutputStream openOutput() throws Exception {
        File dir = new File(backupDirPath);
        createDirIfNotExists();
        if (!dir.canWrite()) {
            throw new BackupFileAccessException("Cannot write to " + dir.getPath() + " dir");
        }

        File file = new File(backupDirPath, archiveFormatter().format(new Date()));
        ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
        ZipEntry entry = new ZipEntry(ARCHIVE_ENTRY_NAME);
        out.putNextEntry(entry);
        return out;
    }

    /**
     * Are there some tables in database?
     *
     * @param connection    connection to use
     * @return true is TEST_TABLE_NAME exists
     * @throws java.sql.SQLException thrown if some SQL error occures
     */
    protected boolean isTables(Connection connection) throws SQLException {
        boolean result = false;
        try {
            DatabaseMetaData metadata = connection.getMetaData();

            ResultSet rs = null;
            try {
                // adjust identifier case according to how the DBMS stores
                // (quoted) identifiers before querying the metadata
                if (TEST_TABLE_NAME_QUOTED) {
                    if (metadata.supportsMixedCaseQuotedIdentifiers() || metadata.storesMixedCaseQuotedIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                schema,
                                TEST_TABLE_NAME,
                                new String[] {"TABLE"}
                            );
                    } else if (metadata.storesLowerCaseQuotedIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toLowerCase(schema),
                                StringHelper.toLowerCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    } else {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toUpperCase(schema),
                                StringHelper.toUpperCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    }

                } else {
                    if (metadata.supportsMixedCaseIdentifiers() || metadata.storesMixedCaseIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                schema,
                                TEST_TABLE_NAME,
                                new String[] {"TABLE"}
                            );
                    } else if (metadata.storesLowerCaseIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toLowerCase(schema),
                                StringHelper.toLowerCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    } else {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toUpperCase(schema),
                                StringHelper.toUpperCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    }
                }
                result = rs.next();

            } finally {
                if (rs!=null) rs.close();
            }

        } catch(SQLException ex) {
            if (log.isErrorEnabled()) {
                log.error("Could not get metadata", ex);
            }
            throw ex;
        }
        return result;
    }

    /**
     * Tries to create a dbbackup directory if it does not exist.
     *
     * @return true if dbbackup directory already exists or was created
     * successfully.
     */
    public boolean createDirIfNotExists() {
        File dir = new File(backupDirPath);
        boolean created = false;

        if (!dir.exists()) {
            if (log.isDebugEnabled()) {
                log.debug(backupDirPath + " does not exist, trying to create it");
            }
            // mkdirs (not mkdir) so missing parent directories are created too
            created = dir.mkdirs();
        } else {
            created = true;
        }

        return created;
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#getBackupFileData(java.util.Date)
     */
    public byte[] getBackupFileData(Date date) {
        String fileName = archiveFormatter().format(date);
        File file = new File(backupDirPath, fileName);
        int length = (int) file.length();
        byte[] result = new byte[length];

        FileInputStream inputStream = null;
        try {
            inputStream = new FileInputStream(file);
            // a single read() is not guaranteed to fill the buffer; loop until
            // the whole file is read or EOF is reached
            int offset = 0;
            int bytesRead;
            while (offset < length
                    && (bytesRead = inputStream.read(result, offset, length - offset)) != -1) {
                offset += bytesRead;
            }
        } catch (IOException e) {
            if (log.isErrorEnabled()) {
                log.error("Cannot retrieve backup file data", e);
            }
            result = null;
        } finally {
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException e) {
                    // ignore
                }
            }
        }

        return result;
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#getBackupFileName(java.util.Date)
     */
    public String getBackupFileName(Date date) {
        return archiveFormatter().format(date);
    }
}
