package com.yuxue.util;

import lombok.extern.slf4j.Slf4j;

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.plugins.PluginFolder;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.TransMeta.TransformationType;
import org.pentaho.di.trans.step.*;
import org.pentaho.di.trans.steps.insertupdate.InsertUpdateMeta;
import org.pentaho.di.trans.steps.sql.ExecSQLMeta;
import org.pentaho.di.trans.steps.tableinput.TableInputMeta;
import org.pentaho.di.trans.steps.tableoutput.TableOutputMeta;
import org.springframework.util.StringUtils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;


/**
 * kettle工具包
 * @author yuxue
 * @date 2021-01-27 14:28
 */
@Slf4j
public class KettleUtil {


    /**
     * Registers an extra plugin folder with Kettle's step-plugin registry.
     * By default Kettle scans {project root}/plugins and {user home}/.kettle/plugins;
     * this adds one more folder so only the required plugins need to be shipped
     * (the full plugin distribution is ~900 MB).
     *
     * @param path plugin folder to register; falls back to a local development
     *             default when null or empty
     */
    public static void initPlugins(String path) {
        if (StringUtils.isEmpty(path)) {
            // NOTE(review): Windows-only development fallback — pass an explicit path elsewhere.
            path = "D:\\data-integration\\plugins";
        }
        // log the effective folder so operators can see which plugins are picked up
        log.info("loading kettle plugins from [{}]", path);
        StepPluginType.getInstance().getPluginFolders().add(new PluginFolder(path, false, true));
    }

    /**
     * Initializes the Kettle runtime environment once, when this class is first loaded.
     * Must succeed before any job or transformation can run.
     */
    static {
        try {
            KettleEnvironment.init();
        } catch (KettleException e) {
            // use SLF4J (available via @Slf4j) instead of printStackTrace so the
            // failure lands in the application log with its full stack trace
            log.error("Failed to initialize the Kettle environment", e);
        }
    }

    /**
     * Loads a generated .kjb (job) file and runs it.
     *
     * @param jobname path to the job file, e.g. d:/test/test.kjb
     * @param param   variables passed into the job script; read there as ${name}
     */
    public static void runJob(String jobname, Map<String, String> param) {
        try {
            JobMeta jobMeta = new JobMeta(jobname, null);
            runJob(jobMeta, param);
        } catch (KettleException e) {
            // keep the full stack trace; System.err.println(e) discarded it
            log.error("Failed to load job file [{}]", jobname, e);
        }
    }

    /**
     * Runs an already-loaded job, blocking until it finishes.
     *
     * @param jobMeta job definition to execute
     * @param param   variables passed into the job script; read there as ${name}
     */
    public static void runJob(JobMeta jobMeta, Map<String, String> param) {
        Job job = new Job(null, jobMeta);
        // pass variables to the job script; referenced there as ${name}
        if (param != null && !param.isEmpty()) {
            param.forEach(job::setVariable);
        }
        job.start();
        job.waitUntilFinished();
        if (job.getErrors() > 0) {
            // report through SLF4J rather than stderr
            log.error("Job finished with errors, ErrorCode==>{}", job.getErrors());
        }
    }


    /**
     * Loads a kettle-generated .ktr (transformation) file and runs it.
     *
     * @param filename path to the transformation file, e.g. d:/test/test.ktr
     * @param param    variables passed into the script; read there as ${name}
     */
    public static void runTrans(String filename, Map<String, String> param) {
        try {
            TransMeta transMeta = new TransMeta(filename);
            runTrans(transMeta, param);
        } catch (KettleException e) {
            // KettleXMLException extends KettleException, so a single catch covers
            // both; the original duplicated the handler and used printStackTrace()
            log.error("Failed to load transformation file [{}]", filename, e);
        }
    }


    /**
     * Runs an already-loaded transformation, blocking until it finishes.
     *
     * @param transMeta transformation definition to execute
     * @param param     variables passed into the script; read there as ${name}
     * @return true when the transformation completed without errors
     */
    public static boolean runTrans(TransMeta transMeta, Map<String, String> param) {
        try {
            Trans trans = new Trans(transMeta);
            trans.setLogLevel(LogLevel.DETAILED);

            // pass variables to the ktr script; referenced there as ${name}
            if (param != null && !param.isEmpty()) {
                param.forEach(trans::setVariable);
            }
            trans.prepareExecution(null);
            trans.startThreads();
            trans.waitUntilFinished();

            if (trans.getErrors() != 0) {
                log.error("<b>执行kettle脚本异常</b>===>" + trans.getErrors());
                return false;
            }
            return true;
        } catch (KettleException e) {
            // one catch suffices (KettleXMLException is a subclass); pass the
            // throwable so SLF4J records the full stack trace, not just the message
            log.error("<br/>执行kettle脚本异常===>", e);
        }
        return false;
    }




    /**
     * Builds a transformation that synchronizes table data (incremental or full).
     * Pipeline: [optional duplicate-cleanup SQL] -> table input -> batched table output.
     *
     * @param transformationName display name of the generated transformation
     * @param sourceDBInfo       source database connection
     * @param sourceTableName    source table (used in step names/descriptions)
     * @param sourceFields       column names on the target database side
     * @param targetDBInfo       target database connection
     * @param targetTableName    target table name
     * @param targetFields       stream field names mapped onto the target columns
     * @param clearRepeatData    when TRUE, prepend a step that deletes possibly duplicated
     *                           rows; null is treated as FALSE (the original unboxed and
     *                           threw NPE on null)
     * @param deleteSql          SQL executed by the optional cleanup step
     * @param selectSql          SQL used to read source rows; ${var} placeholders replaced
     * @return the assembled TransMeta, runnable via {@link #runTrans(TransMeta, Map)}
     */
    public static final TransMeta syncData(String transformationName,
            DatabaseMeta sourceDBInfo, String sourceTableName, String[] sourceFields,
            DatabaseMeta targetDBInfo, String targetTableName, String[] targetFields,
            Boolean clearRepeatData,  String deleteSql, String selectSql) {

        // create a fresh transformation shell
        TransMeta transMeta = new TransMeta();
        transMeta.setName(transformationName);
        transMeta.setTransformationType(TransformationType.Normal);

        // verify the source connection; log instead of writing to stderr
        String report = sourceDBInfo.testConnection();
        log.info("检查数据库连接===>{}", report);

        // register both connections on the transformation
        transMeta.addDatabase(sourceDBInfo);
        transMeta.addDatabase(targetDBInfo);

        // step: read rows from the source table
        TableInputMeta ti = new TableInputMeta();
        ti.setDatabaseMeta(sourceDBInfo);
        ti.setSQL(selectSql);
        ti.setVariableReplacementActive(true);  // substitute ${var} placeholders in the SQL

        String stepName = "从 [" + sourceTableName + "] 表中读取数据";
        String stepId = "step0_readData";
        StepMeta fromStep = new StepMeta(stepId, stepName, ti);
        fromStep.setLocation(200, 300);
        fromStep.setDraw(true);
        fromStep.setDescription("查询数据 from table [" + sourceTableName + "] on database [" + sourceDBInfo + "]");
        transMeta.addStep(fromStep);

        // optional step: remove rows that a re-import would duplicate.
        // Boolean.TRUE.equals(...) is null-safe — the original `if(clearRepeatData)`
        // unboxed the wrapper and threw NullPointerException when null was passed.
        if (Boolean.TRUE.equals(clearRepeatData)) {
            ExecSQLMeta delete = new ExecSQLMeta();
            delete.setSql(deleteSql);
            delete.setDatabaseMeta(targetDBInfo);
            delete.setArguments(new String[0]); // no positional arguments
            delete.setVariableReplacementActive(true);  // substitute ${var} placeholders

            StepMeta step = new StepMeta("step2", "删除重复数据", delete);
            step.setLocation(200, 200);
            step.setDescription("删除重复数据");
            transMeta.addStep(step);

            // hop: cleanup runs before the read step
            transMeta.addTransHop(new TransHopMeta(step, fromStep));
        }

        // step: batch-insert into the target table
        String tostepname = "write to [" + targetTableName + "]";
        TableOutputMeta toi = new TableOutputMeta();
        toi.setDatabaseMeta(targetDBInfo);
        toi.setTableName(targetTableName);
        toi.setCommitSize(1000); // commit every 1000 rows
        // NOTE(review): database columns are taken from sourceFields and stream fields
        // from targetFields; both arrays are identical at the call site — confirm the
        // intended mapping if they ever diverge.
        toi.setFieldDatabase(sourceFields);
        toi.setFieldStream(targetFields);
        toi.setTruncateTable(false);
        toi.setUseBatchUpdate(true);

        // the (StepMetaInterface) cast in the original was redundant
        StepMeta toStep = new StepMeta("OUT_PUTTABLE_" + targetTableName, tostepname, toi);
        toStep.setLocation(550, 100);
        toStep.setDraw(true);
        toStep.setDescription("批量插入数据 to table [" + targetTableName + "] on database [" + targetDBInfo + "]");
        transMeta.addStep(toStep);

        // hop: read -> write
        transMeta.addTransHop(new TransHopMeta(fromStep, toStep));

        return transMeta;
    }



    /**
     * Builds an insert/update transformation: each incoming row is looked up in the
     * target table by key and either inserted or updated. This is effectively
     * row-by-row synchronization and is slow on large volumes — prefer
     * {@link #syncData} for bulk transfers.
     *
     * @param transformationName display name of the generated transformation
     * @param sourceDBInfo       source database connection
     * @param sourceTableName    source table (used in step names/descriptions)
     * @param sourceFields       input-stream field names
     * @param targetDBInfo       target database connection
     * @param targetTableName    target table name
     * @param targetFields       target table column names
     * @param idColumn           two-element array: [0] = stream key field, [1] = target key column
     * @param selectSql          SQL used to read source rows; ${var} placeholders replaced
     * @param updateBypassed     when TRUE, existing rows are not updated (insert-only);
     *                           null is treated as FALSE (the original unboxed and could NPE)
     * @return the assembled TransMeta
     */
    public static final TransMeta insertOrUpdateData(String transformationName,
                                           DatabaseMeta sourceDBInfo, String sourceTableName, String[] sourceFields,
                                           DatabaseMeta targetDBInfo, String targetTableName, String[] targetFields,
                                           String[] idColumn, String selectSql, Boolean updateBypassed) {
        // create a fresh transformation shell
        TransMeta transMeta = new TransMeta();
        transMeta.setName(transformationName);
        transMeta.setTransformationType(TransformationType.Normal);

        // register both connections on the transformation
        transMeta.addDatabase(sourceDBInfo);
        transMeta.addDatabase(targetDBInfo);

        // step: read rows from the source table
        TableInputMeta ti = new TableInputMeta();
        ti.setDatabaseMeta(sourceDBInfo);
        ti.setSQL(selectSql);
        ti.setVariableReplacementActive(true);  // substitute ${var} placeholders

        String stepName = "从[" + sourceTableName + "]表中读取数据";
        StepMeta fromStep = new StepMeta("READ_TABLE_" + sourceTableName, stepName, ti);
        fromStep.setLocation(200, 300);
        fromStep.setDraw(true);
        fromStep.setDescription("查询数据 from table [" + sourceTableName + "] on database [" + sourceDBInfo + "]");
        transMeta.addStep(fromStep);

        // step: insert/update rows on the target table
        String tostepname = "插入/更新数据到表[" + targetTableName + "]";
        InsertUpdateMeta ipm = new InsertUpdateMeta();
        ipm.setDatabaseMeta(targetDBInfo);
        ipm.setTableName(targetTableName);
        // ipm.setSchemaName(""); // database schema, if needed

        // lookup key: one stream field compared with '=' against one target column
        String[] keyStream = {idColumn[0]};    // input-stream key field
        String[] keyLookup = {idColumn[1]};    // target-table key column
        String[] keyCondition = {"="};
        ipm.setKeyStream(keyStream);
        ipm.setKeyLookup(keyLookup);
        ipm.setKeyCondition(keyCondition);
        // BUGFIX: the secondary key array must match the other key arrays' length (1);
        // the original used idColumn.length (2), producing an inconsistent key definition.
        ipm.setKeyStream2(new String[keyStream.length]);

        // field mapping: every source field updates its corresponding target column
        ipm.setUpdateStream(sourceFields);  // input-stream fields
        ipm.setUpdateLookup(targetFields);  // target-table columns
        Boolean[] bl = new Boolean[sourceFields.length];
        Arrays.fill(bl, Boolean.TRUE);
        ipm.setUpdate(bl);
        ipm.setCommitSize("100");
        // null-safe: setUpdateBypassed takes a primitive; the original unboxed a
        // Boolean argument and threw NPE on null
        ipm.setUpdateBypassed(Boolean.TRUE.equals(updateBypassed));

        // the (StepMetaInterface) cast in the original was redundant
        StepMeta toStep = new StepMeta("INSERT_UPDATE_TABLE_" + targetTableName, tostepname, ipm);
        toStep.setLocation(550, 100);
        toStep.setDraw(true);
        toStep.setDescription("插入/更新数据 to table [" + targetTableName + "] on database [" + targetDBInfo + "]");
        transMeta.addStep(toStep);

        // hop: read -> insert/update
        transMeta.addTransHop(new TransHopMeta(fromStep, toStep));

        return transMeta;
    }


    /**
     * Builds a transformation that executes a single SQL statement.
     * Security: the statement is screened against a DDL keyword blacklist, but callers
     * must still validate input — blacklist filtering is not a substitute for proper
     * authorization or parameterized SQL.
     *
     * @param transformationName display name of the generated transformation
     * @param sourceDBInfo       connection the SQL runs against
     * @param sql                statement to execute; ${var} placeholders are replaced
     * @return the assembled TransMeta
     * @throws KettleException if the SQL contains a forbidden DDL keyword
     */
    public static final TransMeta execSQL(String transformationName, DatabaseMeta sourceDBInfo, String sql) throws KettleException {
        // Word-boundary match instead of substring contains(): the original rejected
        // legitimate SQL such as "select created_at ..." because it contains "create".
        if (Pattern.compile("\\b(drop|truncate|alter|create)\\b", Pattern.CASE_INSENSITIVE).matcher(sql).find()) {
            throw new KettleException("输入的sql不合法，不允许包含以下关键字：【drop|truncate|alter|create】");
        }

        // create a fresh transformation shell
        TransMeta transMeta = new TransMeta();
        transMeta.setName(transformationName);
        transMeta.setTransformationType(TransformationType.Normal);
        transMeta.addDatabase(sourceDBInfo);

        // step: execute the SQL statement
        ExecSQLMeta esm = new ExecSQLMeta();
        esm.setSql(sql);
        esm.setDatabaseMeta(sourceDBInfo);
        esm.setArguments(new String[0]); // no positional arguments
        esm.setVariableReplacementActive(true);  // substitute ${var} placeholders

        StepMeta step = new StepMeta("execSQL", "执行sql", esm);
        step.setLocation(200, 200);
        step.setDraw(true);
        step.setDescription("执行sql");
        transMeta.addStep(step);
        try {
            // dump the generated script XML at debug level instead of stderr
            log.debug("===>{}", transMeta.getXML());
        } catch (KettleException e) {
            log.warn("Failed to serialize transformation XML", e);
        }
        return transMeta;
    }

    /**
     * Builds a single-step transformation that reads the incremental-sync flag
     * (a timestamp or numeric id column) from the target database.
     *
     * @param transName    display name of the generated transformation
     * @param targetDBInfo connection to read the flag from
     * @param getFlagSql   SQL that selects the flag value
     * @return the assembled TransMeta
     */
    public static TransMeta getSyncFlagTrans(String transName, DatabaseMeta targetDBInfo, String getFlagSql) {
        // fresh transformation shell bound to the target connection
        TransMeta meta = new TransMeta();
        meta.setName(transName);
        meta.setTransformationType(TransformationType.Normal);
        meta.addDatabase(targetDBInfo);

        // table-input step that runs the flag query
        TableInputMeta flagInput = new TableInputMeta();
        flagInput.setDatabaseMeta(targetDBInfo);
        flagInput.setSQL(getFlagSql);

        // wrap the input in a drawable step node
        StepMeta flagStep = new StepMeta("step0", "获取增量标志位", flagInput);
        flagStep.setLocation(500, 200);
        flagStep.setDescription("获取增量标志位，时间或者数值型id字段");
        flagStep.setDraw(true);
        meta.addStep(flagStep);

        return meta;
    }



    /**
     * Runs the given transformation and collects the rows written by its LAST step
     * as a list of field-name -> string-value maps.
     *
     * @param transMeta    transformation to execute
     * @param transName    unused; kept for interface compatibility
     * @param targetDBInfo unused; kept for interface compatibility
     * @param sql          unused; kept for interface compatibility
     * @return rows emitted by the last step (possibly empty); null when execution
     *         failed with a KettleException — callers already null-check the result
     */
    public static List<Map<String, String>> getJsonDataByKettle(TransMeta transMeta, String transName, DatabaseMeta targetDBInfo, String sql) {
        Trans trans = new Trans(transMeta);
        trans.setLogLevel(LogLevel.DETAILED);
        try {
            trans.prepareExecution(null);
            trans.execute(null);

            // capture every row written by the last step; RowAdapter (wildcard-imported
            // from org.pentaho.di.trans.step) replaces the 3-method anonymous listener
            final List<RowMetaAndData> rows = new ArrayList<>();
            RowListener rowListener = new RowAdapter() {
                @Override
                public void rowWrittenEvent(RowMetaInterface rowMeta, Object[] row)
                        throws KettleStepException {
                    rows.add(new RowMetaAndData(rowMeta, row));
                }
            };

            List<StepMetaDataCombi> steps = trans.getSteps();
            if (steps == null || steps.isEmpty()) {
                // original indexed steps.size() - 1 without a guard -> IndexOutOfBounds
                log.error("transformation has no steps, nothing to collect");
                return new ArrayList<>();
            }
            // attach the listener to the last step in the pipeline.
            // NOTE(review): the listener is attached after execute() starts, as in the
            // original — rows written before attachment could be missed; confirm timing.
            String stepname = steps.get(steps.size() - 1).stepname;
            StepInterface stepInterface = trans.findRunThread(stepname);
            stepInterface.addRowListener(rowListener);

            // block until the transformation completes
            trans.waitUntilFinished();

            List<Map<String, String>> lists = new ArrayList<>(rows.size());
            for (RowMetaAndData rmad : rows) {
                Map<String, String> map = new HashMap<>();
                for (String field : rmad.getRowMeta().getFieldNames()) {
                    map.put(field, rmad.getString(field, null));
                }
                lists.add(map);
            }
            if (trans.getErrors() > 0) {
                log.error("transformation error, ErrorCode==>{}", trans.getErrors());
            }
            return lists;
        } catch (KettleException e) {
            // log with stack trace instead of printStackTrace()
            log.error("Failed to execute transformation", e);
        }
        return null;
    }



    /**
     * Ad-hoc local test driver for the helpers above.
     * NOTE(review): contains hard-coded database hosts, usernames, and passwords —
     * never ship this to production; move connection settings to external
     * configuration or a secrets store.
     */
    public static void main(String[] args) {

        String transName = "数据同步测试";
        // source: SQL Server connection (hard-coded credentials — see class note above)
        DatabaseMeta sourceDBInfo = new DatabaseMeta("db_from_sqlserver", "MSSQLNATIVE", "Native", "172.17.2.46", "GZD_Road_Gate", "1433", "linyan", "linyan");
        String sourceTableName = "tsm_trafficflow_lane_h";
        String[] sourceFields = {"date_id", "stationId", "stationName", "laneType", "enExType", "laneId", "laneNo", "status", "enOverLoad", "flow",
                "smVeh", "midVeh", "bigVeh", "trailer", "car", "truck", "special", "obu", "cpc", "updateTime"};
        sourceDBInfo.addExtraOption("MSSQLNATIVE", "characterEncoding", "utf8");

        // target: MySQL connection (hard-coded credentials — see class note above)
        DatabaseMeta targetDBInfo = new DatabaseMeta("db_to_mysql", "MYSQL", "Native", "172.17.0.47", "leatop-tsem", "3306", "root", "sa@123");
        String targetTableName = "tsm_trafficflow_lane_h";
        String[] targetFields = sourceFields;
        targetDBInfo.addExtraOption("MYSQL", "characterEncoding", "utf8");  // encoding must be set, otherwise synchronized data is garbled

        Boolean clearRepeatData = true;

        // verify both database connections are reachable
        String report = sourceDBInfo.testConnection();
        System.err.println("report===>" + report);
        String report1 = targetDBInfo.testConnection();
        System.err.println("report===>" + report1);

        String getFlagSql = "SELECT date_format(ifnull(max(updateTime), date_add(now(), INTERVAL -60 day)), '%Y-%m-%d %H:%i:00') as last_update_time FROM tsm_trafficflow_lane_h";
        String deleteSql = "delete from tsm_trafficflow_lane_h where updateTime >= '${last_update_time}'";
        String selectSql = "SELECT * FROM TSM_TrafficFlow_Lane_H where updateTime > convert(datetime, '${last_update_time}', 20)";

        // fetch the incremental-sync flag (commented-out usage example)
        /*TransMeta getFlagTrans = getSyncFlagTrans(transName, targetDBInfo, getFlagSql);
        List<Map<String, String>> result = getJsonDataByKettle(getFlagTrans, transName, targetDBInfo, getFlagSql);

        Map<String, String> param = Maps.newHashMap();
        if(null != result && result.size() > 0 && result.get(0).containsKey("last_update_time")) {
            param.putAll(result.get(0));
        }
        System.err.println("===>" + param.toString());*/

        // TransMeta syncDataTrans = syncData(transName, sourceDBInfo, sourceTableName, sourceFields, targetDBInfo, targetTableName, targetFields, clearRepeatData , deleteSql, selectSql);
        // TransMeta syncDataTrans = insertOrUpdateData(transName, sourceDBInfo, sourceTableName, sourceFields, targetDBInfo, targetTableName, targetFields, "date_id" , selectSql, true);
        // TransMeta syncDataTrans = execSQL(transName, sourceDBInfo,  deleteSql);
        // runTrans(syncDataTrans, param);

    }

}
