package org.pentaho.di.job.entries;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.annotations.JobEntry;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.database.MSSQLServerNativeDatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;

// NOTE(review): categoryDescription uses the "DTD_VALIDATOR.Category" key, which looks like a
// copy-paste from another plugin — confirm the intended category key for this entry.
@JobEntry(id = "SQLBak", 
	name = "JobEntrySQLBak.Name", 
	description = "JobEntrySQLBak.TooltipDesc", 
	image = "org/pentaho/di/job/entries/resources/demo.svg", 
	categoryDescription = "DTD_VALIDATOR.Category", 
	i18nPackageName = "org.pentaho.di.job.entries", 
	documentationUrl = "JobEntrySQLBak.DocumentationURL", 
	casesUrl = "JobEntrySQLBak.CasesURL", 
	forumUrl = "JobEntrySQLBak.ForumURL")
public class JobEntryBakProcessor extends JobEntryBase implements Cloneable, JobEntryInterface {

	/**
	 * The PKG member is used when looking up internationalized strings. The
	 * properties file with localized keys is expected to reside in {the package of
	 * the class specified}/messages/messages_{locale}.properties
	 */
	private static final Class<?> PKG = JobEntryBakProcessor.class; // for i18n purposes $NON-NLS-1$

	/** Path of the SQL Server backup (.bak) file to restore from. */
	private String bakfilename;

	/** Name of the database to create and restore the backup into. */
	private String targetDBName;

	/** Target SQL Server host. */
	private String host;

	/** Target SQL Server port. */
	private String port;

	/** Database name used for the initial JDBC connection (not the restore target). */
	private String dbname;

	/** Target SQL Server user. */
	private String user;

	/** Target SQL Server password. */
	private String passwd;

	/**
	 * The JobEntry constructor executes super() and initializes its fields with
	 * sensible defaults for new instances of the job entry.
	 * 
	 * @param name
	 *            the name of the new job entry
	 */
	public JobEntryBakProcessor(String name) {
		super(name, "");

		// the default is to generate a positive outcome
	}

	/**
	 * No-Arguments constructor for convenience purposes.
	 */
	public JobEntryBakProcessor() {
		this("");
	}

	/**
	 * Let PDI know the class name to use for the dialog.
	 * 
	 * @return the class name to use for the dialog for this job entry
	 */
	public String getDialogClassName() {
		return JobEntrySQLServerBakDialog.class.getName();
	}

	/**
	 * This method is used when a job entry is duplicated in Spoon. All fields of
	 * this entry are immutable Strings, so the shallow copy made by super.clone()
	 * is already a sufficient deep copy.
	 * 
	 * @return a copy of this job entry
	 */
	public Object clone() {
		JobEntryBakProcessor je = (JobEntryBakProcessor) super.clone();
		return je;
	}

	/**
	 * Serializes this job entry's configuration to an XML fragment, including the
	 * output of super.getXML() as required by the JobEntryInterface contract.
	 * 
	 * @return a string containing the XML serialization of this job entry
	 */
	@Override
	public String getXML() {
		StringBuilder retval = new StringBuilder(200);

		retval.append(super.getXML());

		retval.append("      ").append(XMLHandler.addTagValue("bakfilename", bakfilename));
		retval.append("      ").append(XMLHandler.addTagValue("targetdbname", targetDBName));
		retval.append("      ").append(XMLHandler.addTagValue("host", host));
		retval.append("      ").append(XMLHandler.addTagValue("port", port));
		retval.append("      ").append(XMLHandler.addTagValue("dbname", dbname));
		retval.append("      ").append(XMLHandler.addTagValue("user", user));
		retval.append("      ").append(XMLHandler.addTagValue("passwd", passwd));

		return retval.toString();
	}

	/**
	 * Loads this job entry's configuration from XML. Calls super.loadXML() first
	 * to ensure correct base-class behavior.
	 * 
	 * @param entrynode
	 *            the XML node containing the configuration
	 * @param databases
	 *            the databases available in the job
	 * @param slaveServers
	 *            the slave servers available in the job
	 * @param rep
	 *            the repository connected to, if any
	 * @param metaStore
	 *            the metastore to optionally read from
	 * @throws KettleXMLException
	 *             if any tag cannot be read
	 */
	@Override
	public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
			IMetaStore metaStore) throws KettleXMLException {
		try {
			super.loadXML(entrynode, databases, slaveServers);
			bakfilename = XMLHandler.getTagValue(entrynode, "bakfilename");
			targetDBName = XMLHandler.getTagValue(entrynode, "targetdbname");
			host = XMLHandler.getTagValue(entrynode, "host");
			port = XMLHandler.getTagValue(entrynode, "port");
			dbname = XMLHandler.getTagValue(entrynode, "dbname");
			user = XMLHandler.getTagValue(entrynode, "user");
			passwd = XMLHandler.getTagValue(entrynode, "passwd");
		} catch (KettleException e) {
			throw new KettleXMLException("Unable to load job entry of type 'sqlbak' from XML node", e);
		}
	}

	/**
	 * Saves this job entry's configuration to a repository.
	 *
	 * @param rep
	 *            the repository to save to
	 * @param id_job
	 *            the id to use for the job when saving
	 * @param metaStore
	 *            the metastore to optionally write to
	 * @throws KettleException
	 *             if any attribute cannot be saved
	 */
	@Override
	public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
		try {
			rep.saveJobEntryAttribute(id_job, getObjectId(), "bakfilename", bakfilename);
			rep.saveJobEntryAttribute(id_job, getObjectId(), "targetdbname", targetDBName);
			rep.saveJobEntryAttribute(id_job, getObjectId(), "host", host);
			rep.saveJobEntryAttribute(id_job, getObjectId(), "port", port);
			rep.saveJobEntryAttribute(id_job, getObjectId(), "dbname", dbname);
			rep.saveJobEntryAttribute(id_job, getObjectId(), "user", user);
			rep.saveJobEntryAttribute(id_job, getObjectId(), "passwd", passwd);
		} catch (KettleDatabaseException dbe) {
			throw new KettleException("Unable to save job entry of type 'sql' to the repository for id_job=" + id_job,
					dbe);
		}
	}

	/**
	 * Reads this job entry's configuration from a repository.
	 * 
	 * @param rep
	 *            the repository to read from
	 * @param metaStore
	 *            the metastore to optionally read from
	 * @param id_jobentry
	 *            the id of the job entry being read
	 * @param databases
	 *            the databases available in the job
	 * @param slaveServers
	 *            the slave servers available in the job
	 * @throws KettleException
	 *             if any attribute cannot be read
	 */
	@Override
	public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
			List<SlaveServer> slaveServers) throws KettleException {
		try {
			bakfilename = rep.getJobEntryAttributeString(id_jobentry, "bakfilename");
			targetDBName = rep.getJobEntryAttributeString(id_jobentry, "targetdbname");
			host = rep.getJobEntryAttributeString(id_jobentry, "host");
			port = rep.getJobEntryAttributeString(id_jobentry, "port");
			dbname = rep.getJobEntryAttributeString(id_jobentry, "dbname");
			user = rep.getJobEntryAttributeString(id_jobentry, "user");
			passwd = rep.getJobEntryAttributeString(id_jobentry, "passwd");
		} catch (KettleDatabaseException dbe) {
			throw new KettleException(
					"Unable to load job entry of type 'sqlbak' from the repository with id_jobentry=" + id_jobentry, dbe);
		}
	}

	/**
	 * Executes the restore: connects to the configured SQL Server, reads the
	 * logical file names out of the backup file, creates the target database,
	 * takes it offline, and restores the backup over it with REPLACE/MOVE.
	 * On any failure the error count is incremented and the result is negative.
	 *
	 * @param previousResult
	 *            the result of the previous execution; updated in place
	 * @param nr
	 *            the job entry number (unused)
	 * @return the updated Result of the execution
	 */
	public Result execute(Result previousResult, int nr) {
		Result result = previousResult;

		// Ad-hoc MSSQL (native JDBC) connection built from the configured fields.
		DatabaseMeta connection = new DatabaseMeta("SQLBAK", "MSSQLNATIVE", "Native (JDBC)", 
				host, dbname, port, user, passwd);
		connection.getAttributes().setProperty(MSSQLServerNativeDatabaseMeta.ATTRIBUTE_USE_INTEGRATED_SECURITY, "false");
		logBasic("connection.getAttributes():" + connection.getAttributes());
		// Log the connection details WITHOUT the password: credentials must never
		// end up in the job log.
		logBasic("目标数据库:{0}，{1}，{2}，{3}", host, dbname, port, user);
		Database db = new Database(this, connection);
		db.shareVariablesWith(this);
		try {
			db.connect(parentJob.getTransactionId(), null);
			// Resolve PDI variables in both configured paths/names (backward
			// compatible: values without ${...} are returned unchanged).
			String targetDBNamet = environmentSubstitute( targetDBName );
			String bakFile = environmentSubstitute( bakfilename );
			Map<String, String> map = getDBLogicalName(db, bakFile);
			String dbLogicalName = map.get("dbLogicalName");
			String logLogicalName = map.get("logLogicalName");
			logBasic("获取备份文件逻辑名称:{0}，{1}", dbLogicalName, logLogicalName);
			// DDL cannot be parameterized with '?', so escape the identifier instead.
			String bracketName = escapeBracketIdentifier(targetDBNamet);
			db.execStatement("CREATE DATABASE [" + bracketName + "]");
			logBasic("创建数据库【{0}】成功", targetDBNamet);
			String dbFilePath = getDBFilePath(db, targetDBNamet);
			logBasic("数据库文件路径：{0}", dbFilePath);
			// Kick off all sessions so the restore can take exclusive access.
			db.execStatement("ALTER DATABASE [" + bracketName + "] SET OFFLINE WITH ROLLBACK IMMEDIATE");
			String sql = "restore database [" + bracketName + "] from disk='" + escapeSqlLiteral(bakFile)
					+ "' WITH REPLACE, "
					+ "MOVE '" + escapeSqlLiteral(dbLogicalName) + "' TO '"
					+ escapeSqlLiteral(dbFilePath + targetDBNamet + ".mdf") + "', "
					+ "MOVE '" + escapeSqlLiteral(logLogicalName) + "' TO '"
					+ escapeSqlLiteral(dbFilePath + targetDBNamet + "_log.ldf") + "' ";
			db.execStatement(sql);
			logBasic("恢复数据库【{0}】成功，{1}", targetDBNamet, sql);
		} catch (Exception je) {
			result.setNrErrors(1);
			result.setLogText(je.getMessage());
			logError(BaseMessages.getString(PKG, "JobSQL.ErrorRunJobEntry", je.getMessage()), je);
		} finally {
			db.disconnect();
		}

		if (result.getNrErrors() == 0) {
			result.setResult(true);
		} else {
			result.setResult(false);
		}

		return result;
	}

	/**
	 * Escapes a value for use inside a bracket-quoted T-SQL identifier ([...]).
	 * Closing brackets are doubled so the identifier cannot be broken out of.
	 *
	 * @param name the raw identifier, may be null
	 * @return the escaped identifier, never null
	 */
	private static String escapeBracketIdentifier(String name) {
		return name == null ? "" : name.replace("]", "]]");
	}

	/**
	 * Escapes a value for use inside a single-quoted T-SQL string literal.
	 * Single quotes are doubled so the literal cannot be broken out of.
	 *
	 * @param value the raw value, may be null
	 * @return the escaped value, never null
	 */
	private static String escapeSqlLiteral(String value) {
		return value == null ? "" : value.replace("'", "''");
	}

	/**
	 * Looks up the physical directory where the given database's data file lives.
	 * Falls back to "C:/" when the database is not found in sysdatabases.
	 *
	 * @param db an open connection to the SQL Server instance
	 * @param dbName the database whose file path is wanted
	 * @return the directory portion of the database's .mdf path
	 * @throws KettleDatabaseException if the query fails
	 * @throws SQLException if reading the result set fails
	 */
	public String getDBFilePath (Database db, String dbName) throws KettleDatabaseException, SQLException {
		String filePath = "C:/";
		try (ResultSet rs = db.openQuery("SELECT filename FROM master.dbo.sysdatabases where name = '"
				+ escapeSqlLiteral(dbName) + "'")){
			if(rs.next()) {
				filePath = rs.getString("filename");
			}
			// Strip the trailing "<dbName>.mdf" so only the directory part remains.
			filePath = filePath.replace(dbName + ".mdf", "");
		} catch (SQLException e1) {
			logError("JobEntryBakProcessor关闭ResultSet错误", e1);
			throw e1;
		}
		return filePath;
	}

	/**
	 * Reads the logical file names out of a backup file via RESTORE FILELISTONLY.
	 * The first row is expected to be the data file, the second the log file.
	 *
	 * @param db an open connection to the SQL Server instance
	 * @param bakFile path of the backup file on the server
	 * @return a map with keys "dbLogicalName" and "logLogicalName"
	 * @throws KettleDatabaseException if the query fails or returns fewer than two rows
	 * @throws SQLException if reading the result set fails
	 */
	private Map<String, String> getDBLogicalName (Database db, String bakFile) throws KettleDatabaseException, SQLException {
		Map<String, String> map = new HashMap<>();
		try (ResultSet rs = db.openQuery("RESTORE FILELISTONLY FROM DISK = N'" + escapeSqlLiteral(bakFile) + "'")){
			// Fail fast with a clear message instead of an opaque cursor error
			// when the backup file yields fewer rows than expected.
			if (!rs.next()) {
				throw new KettleDatabaseException(
						"RESTORE FILELISTONLY returned no rows for backup file: " + bakFile);
			}
			map.put("dbLogicalName", rs.getString("LogicalName"));
			if (!rs.next()) {
				throw new KettleDatabaseException(
						"RESTORE FILELISTONLY returned no log-file row for backup file: " + bakFile);
			}
			map.put("logLogicalName", rs.getString("LogicalName"));
		} catch (SQLException e1) {
			logError("JobEntryBakProcessor关闭ResultSet错误", e1);
			throw e1;
		}
		return map;
	}

	/**
	 * Returns true if the job entry offers a genuine true/false result upon
	 * execution, and thus supports separate "On TRUE" and "On FALSE" outgoing hops.
	 */
	public boolean evaluates() {
		return true;
	}

	/**
	 * Returns true if the job entry supports unconditional outgoing hops.
	 */
	public boolean isUnconditional() {
		return false;
	}

	public String getBakfilename() {
		return bakfilename;
	}

	public void setBakfilename(String bakfilename) {
		this.bakfilename = bakfilename;
	}

	public String getHost() {
		return host;
	}

	public void setHost(String host) {
		this.host = host;
	}

	public String getPort() {
		return port;
	}

	public void setPort(String port) {
		this.port = port;
	}

	public String getDbname() {
		return dbname;
	}

	public void setDbname(String dbname) {
		this.dbname = dbname;
	}

	public String getUser() {
		return user;
	}

	public void setUser(String user) {
		this.user = user;
	}

	public String getPasswd() {
		return passwd;
	}

	public void setPasswd(String passwd) {
		this.passwd = passwd;
	}

	public String getTargetDBName() {
		return targetDBName;
	}

	public void setTargetDBName(String targetDBName) {
		this.targetDBName = targetDBName;
	}
}
