package com.zhuziym.ketfile;

import org.apache.commons.lang.RandomStringUtils;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.logging.LoggingBuffer;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.RepositoryPluginType;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryMeta;

/**
 * @author study
 * @version 1.0
 * @date 2021/6/25 17:15
 */
public class RunningJobs {
    /** Convenience instance created by {@link #main(String[])}; public for demo callers. */
    public static RunningJobs instance;

    /**
     * Points KETTLE_HOME (and the simple-jndi root) at a directory on the classpath.
     * KETTLE_HOME is the directory that contains the {@code .kettle} folder.
     *
     * @param confFile name of the classpath resource directory (e.g. "conf")
     * @throws IllegalStateException if the resource cannot be found on the classpath
     */
    public static void initKettleHome(String confFile) {
        // BUG FIX: getResource() returns null when the resource is missing; the
        // original code then threw an opaque NullPointerException on getFile().
        java.net.URL resource = RunningJobs.class.getClassLoader().getResource(confFile);
        if (resource == null) {
            throw new IllegalStateException(
                    "Cannot find configuration resource \"" + confFile + "\" on the classpath");
        }
        String ketPropFile = resource.getFile();
        // TODO JNDIhome
        System.getProperties().put("KETTLE_JNDI_ROOT", ketPropFile + "/simple-jndi");
        System.getProperties().put("KETTLE_HOME", ketPropFile);
    }

    /**
     * Demo entry point: initializes the Kettle environment, runs a job from the
     * file system, and dumps its log lines.
     * <p>
     * https://www.javatips.net/api/pdi-sdk-plugins-master/kettle-sdk-embedding-samples/src/main/java/org/pentaho/di/sdk/samples/embedding/RunningJobs.java
     * https://www.javatips.net/api/org.pentaho.di.job.job
     */
    public static void main(String[] args) {
        // Kettle Environment must always be initialized first when using PDI:
        // it bootstraps the PDI engine by loading settings, appropriate plugins, etc.
        try {
            initKettleHome("conf");
            KettleEnvironment.init(true);
            EnvUtil.environmentInit();
        } catch (KettleException e) {
            e.printStackTrace();
            return;
        }

        // Create an instance of this demo class for convenience
        instance = new RunningJobs();

        // run a job from the file system
        String jobPath = System.getProperty("user.dir") + "/kettleFile/job_mkdir.kjb";
        Job job = instance.runJobFromFileSystem(jobPath);

        // BUG FIX: runJobFromFileSystem() returns null on failure; the original
        // code dereferenced it unconditionally and crashed with an NPE.
        if (job == null) {
            System.out.println("Job execution failed; see the stack trace above.");
            return;
        }

        // retrieve the logging lines recorded for this job's log channel
        LoggingBuffer appender = KettleLogStore.getAppender();
        String logText = appender.getBuffer(job.getLogChannelId(), false).toString();

        // report on logged lines
        System.out.println("************************************************************************************************");
        System.out.println("LOG REPORT: Job generated the following log lines:\n");
        System.out.println(logText);
        System.out.println("END OF LOG REPORT");
        System.out.println("************************************************************************************************");

        // run a job from the repository
        // NOTE: before running the repository example, you need to make sure
        // that the repository and job exist,
        // and can be accessed by the user and password used
        // uncomment and run after you've got a test repository in place

        // instance.runJobFromRepository("test-repository", "/home/joe",
        // "parametrized_job", "joe", "password");

    }

    /** Prints a three-line banner framing the given message. */
    private static void printBanner(String message) {
        System.out.println("***************************************************************************************");
        System.out.println(message);
        System.out.println("***************************************************************************************\n");
    }

    /**
     * Reports the job's declared named parameters and assigns each one an
     * arbitrary random value (for demonstration purposes only).
     *
     * @param jobMeta the job definition whose parameters are set
     * @throws KettleException if a parameter cannot be read or set
     */
    private static void setRandomParameters(JobMeta jobMeta) throws KettleException {
        System.out.println("Attempting to read and set named parameters");
        for (String parameterName : jobMeta.listParameters()) {
            // determine the parameter description and default values for display purposes
            String description = jobMeta.getParameterDescription(parameterName);
            String defaultValue = jobMeta.getParameterDefault(parameterName);
            // set the parameter value to an arbitrary string
            String parameterValue = RandomStringUtils.randomAlphanumeric(10);

            System.out.println(String.format(
                    "Setting parameter %s to \"%s\" [description: \"%s\", default: \"%s\"]",
                    parameterName, parameterValue, description, defaultValue));

            // assign the value to the parameter on the job
            jobMeta.setParameterValue(parameterName, parameterValue);
        }
    }

    /**
     * This method executes a job defined in a kjb file
     * <p>
     * It demonstrates the following:
     * <p>
     * - Loading a job definition from a kjb file - Setting named parameters for
     * the job - Setting the log level of the job - Executing the job, waiting
     * for it to finish - Examining the result of the job
     *
     * @param filename the file containing the job to execute (kjb file)
     * @return the job that was executed, or null if there was an error
     */
    public Job runJobFromFileSystem(String filename) {

        try {
            printBanner("Attempting to run job " + filename + " from file system");

            // Loading the job file from file system into the JobMeta object.
            // The JobMeta object is the programmatic representation of a job definition.
            JobMeta jobMeta = new JobMeta(filename, null);

            // report declared parameters and set them to arbitrary demo values
            setRandomParameters(jobMeta);

            // Creating a Job object which is the programmatic representation of a job.
            // A Job object can be executed, report success, etc.
            // A null repository means the job is purely file-based.
            Job job = new Job(null, jobMeta);

            // adjust the log level
            job.setLogLevel(LogLevel.MINIMAL);

            System.out.println("\nStarting job");

            // starting the job thread, which will execute asynchronously
            job.start();

            // waiting for the job to finish
            job.waitUntilFinished();

            // retrieve the result object, which captures the success of the job
            Result result = job.getResult();

            // report on the outcome of the job
            System.out.println(String.format("\nJob %s executed with result: %s and %d errors\n",
                    filename, result.getResult(), result.getNrErrors()));

            return job;
        } catch (Exception e) {
            // something went wrong, just log and return
            e.printStackTrace();
            return null;
        }
    }

    /**
     * This method executes a job stored in a repository.
     * <p>
     * It demonstrates the following:
     * <p>
     * - Loading a job definition from a repository - Setting named parameters
     * for the job - Setting the log level of the job - Executing the job,
     * waiting for it to finish - Examining the result of the job
     * <p>
     * When calling this method, kettle will look for the given repository name
     * in $KETTLE_HOME/.kettle/repositories.xml
     * <p>
     * If $KETTLE_HOME is not set explicitly, the user's home directory is
     * assumed
     *
     * @param repositoryName the name of the repository to use
     * @param directory      the directory the job definition lives in (i.e. "/home/joe")
     * @param jobName        the name of the job to execute (i.e. "parametrized_job")
     * @param username       the username to connect with
     * @param password       the password to connect with
     * @return the job that was executed, or null if there was an error
     */
    public Job runJobFromRepository(String repositoryName, String directory, String jobName, String username, String password) {

        Repository repository = null;
        try {
            printBanner("Attempting to run job " + directory + "/" + jobName + " from repository: " + repositoryName);

            // read the repositories.xml file to determine available repositories
            RepositoriesMeta repositoriesMeta = new RepositoriesMeta();
            repositoriesMeta.readData();

            // find the repository definition using its name
            RepositoryMeta repositoryMeta = repositoriesMeta.findRepository(repositoryName);

            if (repositoryMeta == null) {
                throw new KettleException("Cannot find repository \"" + repositoryName + "\". Please make sure it is defined in your " + Const.getKettleUserRepositoriesFile() + " file");
            }

            // use the plug-in system to get the correct repository implementation;
            // the actual implementation will vary depending on the type of the
            // given repository (File-based, DB-based, EE, etc.)
            PluginRegistry registry = PluginRegistry.getInstance();
            repository = registry.loadClass(RepositoryPluginType.class, repositoryMeta, Repository.class);

            // connect to the repository using given username and password
            repository.init(repositoryMeta);
            repository.connect(username, password);

            // find the directory we want to load from
            RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
            RepositoryDirectoryInterface dir = tree.findDirectory(directory);

            if (dir == null) {
                throw new KettleException("Cannot find directory \"" + directory + "\" in repository.");
            }

            // load latest revision of the job.
            // The JobMeta object is the programmatic representation of a job definition.
            JobMeta jobMeta = repository.loadJob(jobName, dir, null, null);

            // report declared parameters and set them to arbitrary demo values
            setRandomParameters(jobMeta);

            // Creating a Job object which is the programmatic representation of a job.
            // A Job object can be executed, report success, etc.
            Job job = new Job(repository, jobMeta);

            // adjust the log level
            job.setLogLevel(LogLevel.MINIMAL);

            System.out.println("\nStarting job");

            // starting the job, which will execute asynchronously
            job.start();

            // waiting for the job to finish
            job.waitUntilFinished();

            // retrieve the result object, which captures the success of the job
            Result result = job.getResult();

            // report on the outcome of the job
            System.out.println(String.format("\nJob %s/%s executed with result: %s and %d errors",
                    directory, jobName, result.getResult(), result.getNrErrors()));

            return job;
        } catch (Exception e) {
            // something went wrong, just log and return
            e.printStackTrace();
            return null;
        } finally {
            // BUG FIX: the original never released the repository connection;
            // disconnect once the job has finished (it has, after waitUntilFinished).
            if (repository != null && repository.isConnected()) {
                repository.disconnect();
            }
        }
    }
}
