package com.bys.ots.kettle;

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryMeta;
import java.io.File;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import cn.hutool.core.io.FileUtil;
@Component
@Configuration
@EnableScheduling
public class kettleRun {

	private static final Logger logger = LoggerFactory.getLogger(kettleRun.class);

	/** Directory containing the Kettle simple-jndi datasource definitions on the ETL host. */
	private static final String JNDI_DIR = "/home/ubuntu/kettle/pdi-ce/data-integration/simple-jndi/";

	/** Kettle repository directory (path inside the database repository) holding the jobs. */
	private static final String REPO_DIRECTORY = "/dev";

	// SECURITY NOTE(review): database and repository credentials are hard-coded below.
	// They should be moved to external configuration (application properties / env vars /
	// a secrets manager) and rotated, since they are now exposed in source control.

	/**
	 * Connection metadata for the production ETL repository database.
	 */
	private static DatabaseMeta prodDatabaseMeta() {
		return new DatabaseMeta("dev-etl-core", "MySQL", "JNDI",
				"scot-instance-prod.ci5eauweemom.rds.cn-north-1.amazonaws.com.cn",
				"dev-etl-core", "3306", "scot_user", "Thermo!2020");
	}

	/**
	 * Connection metadata for the development ETL repository database.
	 */
	private static DatabaseMeta devDatabaseMeta() {
		return new DatabaseMeta("dev-etl-core", "MySQL", "JNDI",
				"scot-instance-dev.c9gqx1tossmu.rds.cn-north-1.amazonaws.com.cn",
				"dev-etl-core", "3306", "root", "12345678");
	}

	/**
	 * Runs the nightly "E1-Job" Kettle job from the production repository.
	 * Scheduled at 02:30 server time (comment in original noted this maps to 10:00 local).
	 *
	 * @throws KettleException if the Kettle environment or repository initialization fails
	 */
	@Scheduled(cron = "0 30 2 * * ?")
	public void executeE1Job() throws KettleException {
		// kettle 数据库读取
		logger.info("kettle 数据库E1读取");
		runRepositoryJob("E1-Job", prodDatabaseMeta());
	}

	/**
	 * Runs the nightly "sap-job" Kettle job from the production repository.
	 * Scheduled at 01:10 server time (comment in original noted this maps to 09:00 local).
	 *
	 * @throws KettleException if the Kettle environment or repository initialization fails
	 */
	@Scheduled(cron = "0 10 1 * * ?")
	public void executeSapJob() throws KettleException {
		logger.info("kettle 数据库sap读取");
		runRepositoryJob("sap-job", prodDatabaseMeta());
	}

	/**
	 * Runs the "sap-css-job" Kettle job from the development repository.
	 * Currently NOT scheduled (the @Scheduled annotation was commented out in the original);
	 * kept as a manually-invocable entry point.
	 *
	 * @throws KettleException if the Kettle environment or repository initialization fails
	 */
	// @Scheduled(cron = "0 40 1 * * ?")
	public void executeSapCSSJob() throws KettleException {
		logger.info("kettle 数据库sapCss读取");
		runRepositoryJob("sap-css-job", devDatabaseMeta());
	}

	/**
	 * Shared execution path for all scheduled jobs: configures simple-jndi, initializes
	 * the Kettle environment, connects to the database-backed repository, loads the named
	 * job from {@link #REPO_DIRECTORY} and runs it to completion.
	 *
	 * <p>Previously this logic was copy-pasted in each scheduled method, which caused the
	 * sap-css job to log under the wrong name and left the repository connection open.
	 *
	 * @param jobName  name of the job stored in the Kettle repository (e.g. "E1-Job")
	 * @param dataMeta connection metadata of the repository database
	 * @throws KettleException if environment init or repository connection fails
	 */
	private void runRepositoryJob(String jobName, DatabaseMeta dataMeta) throws KettleException {
		// Point Kettle's simple-jndi lookup at the on-disk datasource definitions.
		File jndiFile = FileUtil.file(JNDI_DIR);
		Const.JNDI_DIRECTORY = jndiFile.getPath();

		KettleEnvironment.init();

		KettleDatabaseRepository repository = new KettleDatabaseRepository();
		// 数据库形式的资源库元对象 (database-backed repository metadata)
		KettleDatabaseRepositoryMeta repositoryMeta = new KettleDatabaseRepositoryMeta();
		repositoryMeta.setConnection(dataMeta);
		repository.init(repositoryMeta);

		repository.connect("admin", "admin");
		if (!repository.isConnected()) {
			// Original code fell through and attempted to run the job anyway;
			// bail out instead, since nothing downstream can work without a connection.
			logger.error("the Database connection failed");
			return;
		}
		logger.info("the Database connection successful");

		try {
			// 根据指定的字符串路径找到目录 (resolve the repository directory by path)
			RepositoryDirectoryInterface directory = repository.findDirectory(REPO_DIRECTORY);
			// 加载指定的job (load the named job from the repository)
			JobMeta jobMeta = repository.loadJob(repository.getJobId(jobName, directory), null);
			Job job = new Job(repository, jobMeta);
			job.run();
			job.waitUntilFinished();
			job.setFinished(true);

			// Fixed: the original logged the literal text "job.getResult()" instead of the result.
			logger.info("->{} 运行结果: {}", jobName, job.getResult());
			if (job.getErrors() > 0) {
				logger.error("{} Run failed!", jobName);
			}
		} catch (Exception e) {
			// Keep the original best-effort semantics (don't propagate), but preserve
			// the stack trace in the application log instead of printStackTrace().
			logger.error("{} execution failed", jobName, e);
		} finally {
			// The original never disconnected, leaking a repository DB connection per run.
			repository.disconnect();
		}
	}
}
