package com.example.fg.kettle;


import org.pentaho.di.cluster.ClusterSchema;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryMeta;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.di.trans.TransMeta;

/**
 * Helper for running Pentaho Kettle (PDI) transformations and jobs:
 * from local .ktr/.kjb files, from a database-backed repository, and
 * remotely on a slave server or cluster.
 *
 * <p>The instance fields below are connection defaults consumed by the
 * static helpers; override them via setters before use. NOTE(review):
 * credentials are hard-coded here — externalize them in real deployments.
 */
public class KettleUtil {

    // --- database connection defaults -------------------------------------
    private String connetionName = "localhost";      // (sic) historic misspelling kept for API compatibility
    private String databaseType = "MYSQL";
    private String connectionType = "Native(JDBC)";  // DatabaseMeta "access type" slot
    private String hostAddress = "localhost";
    private String databaseName = "test";
    private String databasePort = "3306";
    private String userName = "root";
    private String password = "toor@1234";
    // --- repository defaults ----------------------------------------------
    private String repoName = "repo";
    private String repoUserName = "admin";
    private String repoPassword = "admin";
    private String repoJobDir = "/";
    private String repoTransDir = "/";
    // --- slave (Carte) server defaults ------------------------------------
    private String slaveName = "master";
    private String slaveHostname = "192.168.10.147";
    private String slavePort = "8080";
    private String slaveUsername = "cluster";
    private String slavePassword = "cluster";

    /** @return the database connection name (historic misspelling retained for callers). */
    public String getConnetionName() {
        return connetionName;
    }

    public void setConnetionName(String connetionName) {
        this.connetionName = connetionName;
    }

    /** Correctly-spelled alias for {@link #getConnetionName()}. */
    public String getConnectionName() {
        return connetionName;
    }

    /** Correctly-spelled alias for {@link #setConnetionName(String)}. */
    public void setConnectionName(String connectionName) {
        this.connetionName = connectionName;
    }

    public String getDatabaseType() {
        return databaseType;
    }

    public void setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
    }

    public String getConnectionType() {
        return connectionType;
    }

    public void setConnectionType(String connectionType) {
        this.connectionType = connectionType;
    }

    public String getHostAddress() {
        return hostAddress;
    }

    public void setHostAddress(String hostAddress) {
        this.hostAddress = hostAddress;
    }

    public String getDatabaseName() {
        return databaseName;
    }

    public void setDatabaseName(String databaseName) {
        this.databaseName = databaseName;
    }

    public String getDatabasePort() {
        return databasePort;
    }

    public void setDatabasePort(String databasePort) {
        this.databasePort = databasePort;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getRepoName() {
        return repoName;
    }

    public void setRepoName(String repoName) {
        this.repoName = repoName;
    }

    public String getRepoUserName() {
        return repoUserName;
    }

    public void setRepoUserName(String repoUserName) {
        this.repoUserName = repoUserName;
    }

    public String getRepoPassword() {
        return repoPassword;
    }

    public void setRepoPassword(String repoPassword) {
        this.repoPassword = repoPassword;
    }

    public String getRepoJobDir() {
        return repoJobDir;
    }

    public void setRepoJobDir(String repoJobDir) {
        this.repoJobDir = repoJobDir;
    }

    public String getRepoTransDir() {
        return repoTransDir;
    }

    public void setRepoTransDir(String repoTransDir) {
        this.repoTransDir = repoTransDir;
    }

    public String getSlaveName() {
        return slaveName;
    }

    public void setSlaveName(String slaveName) {
        this.slaveName = slaveName;
    }

    public String getSlaveHostname() {
        return slaveHostname;
    }

    public void setSlaveHostname(String slaveHostname) {
        this.slaveHostname = slaveHostname;
    }

    public String getSlavePort() {
        return slavePort;
    }

    public void setSlavePort(String slavePort) {
        this.slavePort = slavePort;
    }

    public String getSlaveUsername() {
        return slaveUsername;
    }

    public void setSlaveUsername(String slaveUsername) {
        this.slaveUsername = slaveUsername;
    }

    public String getSlavePassword() {
        return slavePassword;
    }

    public void setSlavePassword(String slavePassword) {
        this.slavePassword = slavePassword;
    }

    /**
     * Runs a transformation defined in a local .ktr file with no arguments.
     *
     * @param transFileName path to the .ktr file
     * @throws Exception if the transformation finishes with errors
     */
    public static void callNativeTrans(String transFileName) throws Exception {
        callNativeTransWithParams(null, transFileName);
    }

    /**
     * Runs a transformation defined in a local .ktr file with command-line
     * style arguments and blocks until it completes.
     *
     * @param params        arguments handed to {@code Trans.execute} (may be null)
     * @param transFileName path to the .ktr file
     * @throws Exception if the transformation finishes with errors
     */
    public static void callNativeTransWithParams(String[] params, String transFileName) throws Exception {
        // Bootstrap the Kettle environment (plugins, logging) and load
        // kettle.properties variables before parsing the file.
        KettleEnvironment.init();
        EnvUtil.environmentInit();
        TransMeta transMeta = new TransMeta(transFileName);
        Trans trans = new Trans(transMeta);
        // execute() starts the transformation asynchronously; wait for it.
        trans.execute(params);
        trans.waitUntilFinished();
        if (trans.getErrors() > 0) {
            throw new Exception("There are errors during transformation exception!(传输过程中发生异常)");
        }
    }

    /**
     * Runs a job defined in a local .kjb file and blocks until it completes.
     *
     * @param jobName path to the .kjb file
     * @throws Exception if the job finishes with errors
     */
    public static void callNativeJob(String jobName) throws Exception {
        KettleEnvironment.init();
        JobMeta jobMeta = new JobMeta(jobName, null);
        Job job = new Job(null, jobMeta);
        // To pass parameters into the job script (read there as ${name}):
        // job.setVariable(paraname, paravalue);
        job.start();
        job.waitUntilFinished();
        if (job.getErrors() > 0) {
            throw new Exception("There are errors during job exception!(执行job发生异常)");
        }
    }

    /**
     * Connects to a database-backed Kettle repository using the defaults
     * declared on this class.
     *
     * <p>Return type narrowed from {@code Object} to
     * {@link KettleDatabaseRepository}; existing caller-side casts remain valid.
     *
     * @return the connected repository, or {@code null} if the connection failed
     * @throws KettleException if environment init or repository access fails
     */
    public static KettleDatabaseRepository RepositoryCon() throws KettleException {
        KettleEnvironment.init();
        // One defaults instance instead of a fresh KettleUtil per getter call.
        KettleUtil cfg = new KettleUtil();
        // BUG FIX: the third DatabaseMeta argument is the *access type*; the
        // original passed the connection name there and never used connectionType.
        DatabaseMeta dataMeta = new DatabaseMeta(
                cfg.getConnetionName(), cfg.getDatabaseType(), cfg.getConnectionType(),
                cfg.getHostAddress(), cfg.getDatabaseName(), cfg.getDatabasePort(),
                cfg.getUserName(), cfg.getPassword());
        // Describe the database-backed repository and bind the connection to it.
        KettleDatabaseRepositoryMeta repInfo = new KettleDatabaseRepositoryMeta();
        repInfo.setConnection(dataMeta);
        repInfo.setName(cfg.getRepoName());
        KettleDatabaseRepository rep = new KettleDatabaseRepository();
        rep.init(repInfo);
        // Default repository credentials.
        rep.connect(cfg.getRepoUserName(), cfg.getRepoPassword());
        if (rep.isConnected()) {
            System.out.println("连接成功");
            return rep;
        }
        System.out.println("连接失败");
        return null;
    }

    /**
     * Executes a repository job remotely on the configured slave (Carte) server.
     *
     * @param rep     a connected repository (see {@link #RepositoryCon()})
     * @param jobName name of the job inside {@code repoJobDir}
     */
    public static void runJob(KettleDatabaseRepository rep, String jobName) {
        try {
            KettleUtil cfg = new KettleUtil();
            // Resolve the repository directory from its string path, then load the job.
            RepositoryDirectoryInterface dir = rep.findDirectory(cfg.getRepoJobDir());
            JobMeta jobMeta = rep.loadJob(rep.getJobId(jobName, dir), null);
            Job job = new Job(rep, jobMeta);

            // Parameters, if the job declares any:
            // jobMeta.setParameterValue("method", "update");

            job.setLogLevel(LogLevel.BASIC);
            // Describe the slave server the job will be shipped to.
            SlaveServer ssi = new SlaveServer();
            ssi.setHostname(cfg.getSlaveHostname());
            ssi.setPort(cfg.getSlavePort());
            ssi.setName(cfg.getSlaveName());
            ssi.setUsername(cfg.getSlaveUsername());
            ssi.setPassword(cfg.getSlavePassword());
            job.setExecutingServer(cfg.getSlaveName());

            // Remote (not local) execution against the slave server.
            JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
            jobExecutionConfiguration.setExecutingLocally(false);
            jobExecutionConfiguration.setExecutingRemotely(true);
            jobExecutionConfiguration.setRemoteServer(ssi);
            jobExecutionConfiguration.setRepository(rep);

            // Ships the job to the slave server; returns the Carte object id
            // that can be used to poll status remotely.
            String carteObjectId = Job.sendToSlaveServer(jobMeta, jobExecutionConfiguration, rep, null);
            System.out.println(carteObjectId);

            /* Plain local execution alternative:
             job.run();
             job.waitUntilFinished();
             job.setFinished(true);
             System.out.println(job.getResult());
             */
        } catch (Exception e) {
            // NOTE(review): errors are only logged, matching the original
            // contract (method declares no checked exceptions).
            e.printStackTrace();
        }
    }

    /**
     * Executes a repository transformation: clustered if the transformation
     * references a cluster schema, otherwise locally in this JVM.
     *
     * @param rep       a connected repository (see {@link #RepositoryCon()})
     * @param transName name of the transformation inside {@code repoTransDir}
     */
    public static void runTrans(KettleDatabaseRepository rep, String transName) {
        try {
            KettleUtil cfg = new KettleUtil();
            // Resolve the repository directory from its string path, then load the transformation.
            RepositoryDirectoryInterface dir = rep.findDirectory(cfg.getRepoTransDir());
            TransMeta tmeta = rep.loadTransformation(rep.getTransformationID(transName, dir), null);
            // Parameters, if the transformation declares any:
            // tmeta.setParameterValue("", "");
            Trans trans = new Trans(tmeta);
            ClusterSchema cluster = trans.getTransMeta().findFirstUsedClusterSchema();
            if (cluster != null) {
                // A cluster schema is referenced: run clustered.
                TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration();
                executionConfiguration.setExecutingLocally(false);
                executionConfiguration.setExecutingRemotely(false);
                executionConfiguration.setExecutingClustered(true);
                executionConfiguration.setClusterPosting(true);
                executionConfiguration.setClusterPreparing(true);
                executionConfiguration.setClusterStarting(true);
                executionConfiguration.setClusterShowingTransformation(false);
                executionConfiguration.setSafeModeEnabled(false);
                executionConfiguration.setRepository(rep);
                executionConfiguration.setLogLevel(LogLevel.BASIC);
                executionConfiguration.setVariables(trans.getTransMeta());
                TransMeta transMeta = trans.getTransMeta();
                try {
                    Trans.executeClustered(transMeta, executionConfiguration);
                    System.out.println("执行完毕");
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                // Local execution. BUG FIX: replaced the CPU-burning busy-wait
                // loop with waitUntilFinished(), and removed System.exit(0)
                // which killed the caller's whole JVM from a library method.
                trans.startThreads();
                trans.waitUntilFinished();
                if (trans.getErrors() > 0) {
                    System.out.println("有异常");
                }
            }

            /* Plain execution alternative:
            trans.execute(null);
            trans.waitUntilFinished();
            if(trans.getErrors()>0){
                System.out.println("有异常");
            }
            */
        } catch (Exception e) {
            // NOTE(review): errors are only logged, matching the original
            // contract (method declares no checked exceptions).
            e.printStackTrace();
        }
    }
}