package com.sl.core.engine.trans;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.exceptions.ExceptionUtil;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.ReflectUtil;
import cn.hutool.core.util.StrUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.sl.core.engine.ProcessConfig;
import com.sl.core.engine.constants.ProcessCompDescriptorConstants;
import com.sl.core.engine.datasource.AnylineServiceAdaptor;
import com.sl.core.engine.listener.PipelineExecutionListener;
import com.sl.core.engine.listener.PipelineListener;
import com.sl.core.engine.log.ProcessLogChannel;
import com.sl.core.engine.log.ProcessLoggingEventListener;
import com.sl.core.engine.meta.impl.AbstractProcessCompDescriptor;
import com.sl.core.engine.meta.trans.ProcessTransDescriptor;
import com.sl.core.engine.rowset.ProcessDbRowSet;
import com.sl.core.engine.rowset.ProcessFileRowSet;
import com.sl.core.engine.rowset.ProcessRowSetEnum;
import com.sl.core.engine.status.ProcessExecutorCompInfo;
import com.sl.core.engine.step.impl.AbstractProcessComp;
import com.sl.core.engine.thread.ProcessRunSingleThreaded;
import com.sl.core.engine.thread.ProcessRunThread;
import com.sl.core.engine.thread.ThreadConfig;
import com.sl.core.engine.thread.TraceThreadPoolTaskExecutor;
import lombok.Getter;
import lombok.Setter;
import lombok.SneakyThrows;
import org.apache.hop.core.*;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.extension.ExtensionPointHandler;
import org.apache.hop.core.extension.HopExtensionPoint;
import org.apache.hop.core.logging.*;
import org.apache.hop.core.row.value.ValueMetaString;
import org.apache.hop.core.util.EnvUtil;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.vfs.HopVfs;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.partition.PartitionSchema;
import org.apache.hop.pipeline.*;
import org.apache.hop.pipeline.config.IPipelineEngineRunConfiguration;
import org.apache.hop.pipeline.config.PipelineRunConfiguration;
import org.apache.hop.pipeline.engine.EngineComponent;
import org.apache.hop.pipeline.engine.IPipelineEngine;
import org.apache.hop.pipeline.engine.PipelineEngineCapabilities;
import org.apache.hop.pipeline.engines.local.LocalPipelineEngineCapabilities;
import org.apache.hop.pipeline.engines.local.LocalPipelineRunConfiguration;
import org.apache.hop.pipeline.transform.*;

import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

public class ProcessTrans extends Pipeline {
    // i18n message bundle anchor; required by the Hop Translator2 tooling.
    private static Class<?> PKG = ProcessTrans.class; // for i18n purposes, needed by Translator2!!

    // Executor used to run transform-initialization threads in parallel during
    // prepareExecution(); when null, init threads are run synchronously on the
    // caller thread (see the init loop in prepareExecution()).
    private TraceThreadPoolTaskExecutor initTaskExecutor;

    // This engine advertises local-execution capabilities.
    private PipelineEngineCapabilities engineCapabilities = new LocalPipelineEngineCapabilities();


    // Executor used for running the transforms once execution starts; falls
    // back to DEFAULT_POOL when no configured pool matches (setExecuteThreadPool).
    private TraceThreadPoolTaskExecutor executeThreadTaskExecutor;

    // Per-component timeout; defaults to 60 * 10 when the ProcessConfig does
    // not supply a value. NOTE(review): unit appears to be seconds (600 = 10
    // minutes) — confirm against the consumer of this value.
    @Getter
    private Long compTimeout;

    // Selects the row-set implementation (FILE / DB / in-memory); compared
    // against ProcessRowSetEnum codes when allocating row sets.
    private String rowSetTag;

    // Data source id associated with the row-set tag. NOTE(review): presumably
    // consumed by ProcessDbRowSet — confirm against that class.
    @Getter
    private String rowSetTagDataSourceId;

    // Whether this run is a compensation (replay) run; defaults to FALSE.
    @Getter
    @Setter
    private Boolean compensateFlag = Boolean.FALSE;

    // Log id of the original run being compensated, when compensateFlag is set.
    @Getter
    @Setter
    private String compensateLogId;

    // Whether step status should be printed. NOTE(review): never assigned in
    // the visible constructors, so it may remain null — callers should null-check.
    @Getter
    @Setter
    private Boolean printStepStatusFlag;


    // Listeners notified of pipeline lifecycle events; seeded with the
    // singleton PipelineExecutionListener in the constructor.
    @Getter
    private List<PipelineListener> pipelineListeners;

    // Adaptor for Anyline-based data-source access, created per pipeline instance.
    @Getter
    private AnylineServiceAdaptor anylineServiceAdaptor;

    // Shared fallback executor for transform execution when no pool is
    // configured for the given thread code: 20 core / 200 max threads,
    // 5-minute keep-alive, queue capacity 1000.
    protected static final TraceThreadPoolTaskExecutor DEFAULT_POOL = new TraceThreadPoolTaskExecutor(20,
            200,
            5, TimeUnit.MINUTES, 1000, "default");

    static {
        // When the queue is saturated, run the task on the submitting thread
        // (caller-runs backpressure) instead of rejecting it.
        DEFAULT_POOL.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
    }

    /**
     * Creates a ProcessTrans without an owning ProcessInstance.
     * Delegates to the full constructor with a null instance, so the
     * pipeline inherits no instance-level variables.
     */
    public ProcessTrans(ProcessTransDescriptor transMeta, ProcessConfig processConfig) {
        this(transMeta, processConfig, null);
    }

    /**
     * Creates a ProcessTrans configured from the given process configuration.
     *
     * @param transMeta       the pipeline (transformation) descriptor to execute
     * @param processConfig   engine configuration: run name, thread-pool codes,
     *                        row-set strategy, component timeout and compensation settings
     * @param processInstance optional owning instance supplying variables; may be null
     */
    public ProcessTrans(ProcessTransDescriptor transMeta, ProcessConfig processConfig, ProcessInstance processInstance) {
        super(transMeta, ObjectUtil.isNotNull(processInstance) ? processInstance.getVariables() : null, processInstance);

        // Build a local run configuration named after the process.
        IPipelineEngineRunConfiguration pipelineEngineRunConfiguration = new LocalPipelineRunConfiguration();
        pipelineEngineRunConfiguration.setEnginePluginId(processConfig.getOpenId());
        pipelineEngineRunConfiguration.setEnginePluginName(processConfig.getProcessName());
        PipelineRunConfiguration pipelineRunConfiguration = new PipelineRunConfiguration();
        pipelineRunConfiguration.setEngineRunConfiguration(pipelineEngineRunConfiguration);
        pipelineRunConfiguration.setName(processConfig.getProcessName());
        this.setPipelineRunConfiguration(pipelineRunConfiguration);

        // Resolve the init and execute thread pools by their configured codes.
        setInitThreadPool(processConfig.getPrepareThreadCode());
        setExecuteThreadPool(processConfig.getExecuteThreadCode());

        // Default the component timeout to 60 * 10 when not configured.
        this.compTimeout = ObjectUtil.defaultIfNull(processConfig.getCompTimeout(), 60 * 10L);
        this.rowSetTag = processConfig.getRowSetTag();
        this.rowSetTagDataSourceId = processConfig.getRowSetTagDataSourceId();
        // Keep the field's FALSE default when the config does not specify a flag;
        // a plain assignment would overwrite the initializer with null.
        this.compensateFlag = ObjectUtil.defaultIfNull(processConfig.getCompensateFlag(), Boolean.FALSE);
        this.compensateLogId = processConfig.getCompensateLogId();

        anylineServiceAdaptor = new AnylineServiceAdaptor();
        pipelineListeners = Lists.newArrayList();
        pipelineListeners.add(PipelineExecutionListener.INSTANCE);
    }

    /**
     * Resolves the preparation-phase executor by its configured code.
     * The result may be null, in which case transform initialization later
     * runs synchronously on the caller thread (see prepareExecution()).
     */
    private void setInitThreadPool(String bootThreadCode) {
        this.initTaskExecutor = ThreadConfig.INSTANCE.get(bootThreadCode);
    }

    /**
     * Replaces the current log channel with a ProcessLogChannel, preserving the
     * configured log level and container object id, and wiring this pipeline
     * in as the channel's hook target.
     */
    public void setProcessLogChannel() {
        ILogChannel sourceLog = this.log;
        // Fall back to this pipeline as the logging parent when no parent is
        // set, consistent with the channel creation in prepareExecution().
        this.log = new ProcessLogChannel(this, this.getParent() == null ? this : this.getParent());
        this.log.setLogLevel(sourceLog.getLogLevel());
        ((ProcessLogChannel) this.log).setHooks(this);

        this.log.setContainerObjectId(this.containerObjectId);
    }

    /**
     * Resolves the execution-phase executor by its configured code, falling
     * back to the shared DEFAULT_POOL when no matching executor exists.
     */
    private void setExecuteThreadPool(String bootThreadCode) {
        TraceThreadPoolTaskExecutor resolved = ThreadConfig.INSTANCE.get(bootThreadCode);
        this.executeThreadTaskExecutor = ObjectUtil.isNull(resolved) ? DEFAULT_POOL : resolved;
    }


    // Trace/transaction id hook; this implementation provides no id and always
    // returns null. NOTE(review): presumably intended to be overridden by
    // subclasses or consulted by the logging layer — confirm.
    protected String getTid() {
        return null;
    }


    /** Returns the (local) engine capabilities advertised by this pipeline engine. */
    @Override
    public PipelineEngineCapabilities getEngineCapabilities() {
        return engineCapabilities;
    }

    /**
     * Returns the raw status string as this engine's status description.
     * NOTE(review): delegates to super.getStatus() rather than
     * super.getStatusDescription() — confirm this is intentional.
     */
    @Override
    public String getStatusDescription() {
        return super.getStatus();
    }


    @Override
    public void prepareExecution() throws HopException {

        setPreparing(true);
//        executionStartDate = new Date();
        setExecutionStartDate(new Date());
        setRunning(false);

        // We create the log channel when we're ready to rock and roll
        // Before that it makes little sense. We default to GENERAL there.
        // We force the creation of a new log channel ID every time we run this pipeline
        //
        this.log = new ProcessLogChannel(this, getParent() == null ? this : this.getParent(), isGatheringMetrics(), true);
        this.log.setLogLevel(logLevel);

        if (this.containerObjectId == null) {
            this.containerObjectId = log.getContainerObjectId();
        }

        if (log.isDebug()) {
            log.logDebug(
                    BaseMessages.getString(
                            PKG,
                            "Pipeline.Log.NumberOfTransformsToRun",
                            String.valueOf(pipelineMeta.nrTransforms()),
                            String.valueOf(pipelineMeta.nrPipelineHops())));
        }

        log.snap(Metrics.METRIC_PIPELINE_EXECUTION_START);
        log.snap(Metrics.METRIC_PIPELINE_INIT_START);

        log.logBasic(
                "Executing this pipeline using the Local Pipeline Engine with run configuration '"
                        + pipelineRunConfiguration.getName()
                        + "'");

        ExtensionPointHandler.callExtensionPoint(
                log, this, HopExtensionPoint.PipelinePrepareExecution.id, this);

        activateParameters(this);

        if (pipelineMeta.getName() == null) {
            if (pipelineMeta.getFilename() != null) {
                log.logBasic(
                        BaseMessages.getString(
                                PKG, "Pipeline.Log.ExecutionStartedForFilename", pipelineMeta.getFilename()));
            }
        } else {
            log.logBasic(
                    BaseMessages.getString(
                            PKG, "Pipeline.Log.ExecutionStartedForPipeline", pipelineMeta.getName()));
        }

        if (isSafeModeEnabled()) {
            if (log.isDetailed()) {
                log.logDetailed(
                        BaseMessages.getString(PKG, "Pipeline.Log.SafeModeIsEnabled", pipelineMeta.getName()));
            }
        }

        // setInternalHopVariables(this); --> Let's not do this, when running
        // without file, for example remote, it spoils the fun

        // Keep track of all the row sets and allocated transforms
        //
        setTransforms(Collections.synchronizedList(new ArrayList<>()));

        List<TransformMetaDataCombi> transforms = getTransforms();

        rowsets = new ArrayList<>();

        List<TransformMeta> hopTransforms = pipelineMeta.getPipelineHopTransforms(false);

        if (log.isDetailed()) {
            log.logDetailed(
                    BaseMessages.getString(
                            PKG, "Pipeline.Log.FoundDefferentTransforms", String.valueOf(hopTransforms.size())));
            log.logDetailed(BaseMessages.getString(PKG, "Pipeline.Log.AllocatingRowsets"));
        }
        // First allocate all the rowsets required!
        // Note that a mapping doesn't receive ANY input or output rowsets...
        //
        for (int i = 0; i < hopTransforms.size(); i++) {
            TransformMeta thisTransform = hopTransforms.get(i);
            if (thisTransform.isMapping()) {
                continue; // handled and allocated by the mapping transform itself.
            }

            if (log.isDetailed()) {
                log.logDetailed(
                        BaseMessages.getString(
                                PKG,
                                "Pipeline.Log.AllocateingRowsetsForTransform",
                                String.valueOf(i),
                                thisTransform.getName()));
            }

            List<TransformMeta> nextTransforms = pipelineMeta.findNextTransforms(thisTransform);
            for (TransformMeta nextTransform : nextTransforms) {
                // What's the next transform?
                if (nextTransform.isMapping()) {
                    continue; // handled and allocated by the mapping transform itself.
                }

                // How many times do we start the source transform?
                int thisCopies = thisTransform.getCopies(this);

                if (thisCopies < 0) {
                    // This can only happen if a variable is used that didn't resolve to a positive integer
                    // value
                    //
                    throw new HopException(
                            BaseMessages.getString(
                                    PKG, "Pipeline.Log.TransformCopiesNotCorrectlyDefined", thisTransform.getName()));
                }

                // How many times do we start the target transform?
                int nextCopies = nextTransform.getCopies(this);

                // Are we re-partitioning?
                boolean repartitioning;
                if (thisTransform.isPartitioned()) {
                    repartitioning =
                            !thisTransform
                                    .getTransformPartitioningMeta()
                                    .equals(nextTransform.getTransformPartitioningMeta());
                } else {
                    repartitioning = nextTransform.isPartitioned();
                }

                int nrCopies;
                if (log.isDetailed()) {
                    log.logDetailed(
                            BaseMessages.getString(
                                    PKG,
                                    "Pipeline.Log.copiesInfo",
                                    String.valueOf(thisCopies),
                                    String.valueOf(nextCopies)));
                }
                int dispatchType;
                if (thisCopies == 1 && nextCopies == 1) {
                    dispatchType = TYPE_DISP_1_1;
                    nrCopies = 1;
                } else if (thisCopies == 1 && nextCopies > 1) {
                    dispatchType = TYPE_DISP_1_N;
                    nrCopies = nextCopies;
                } else if (thisCopies > 1 && nextCopies == 1) {
                    dispatchType = TYPE_DISP_N_1;
                    nrCopies = thisCopies;
                } else if (thisCopies == nextCopies && !repartitioning) {
                    dispatchType = TYPE_DISP_N_N;
                    nrCopies = nextCopies;
                } else {
                    // > 1!
                    dispatchType = TYPE_DISP_N_M;
                    nrCopies = nextCopies;
                } // Allocate a rowset for each destination transform

                // Allocate the rowsets
                //
                if (dispatchType != TYPE_DISP_N_M) {
                    for (int c = 0; c < nrCopies; c++) {
                        IRowSet rowSet;
                        switch (pipelineMeta.getPipelineType()) {
                            case Normal:
                                // This is a temporary patch until the batching rowset has proven
                                // to be working in all situations.
                                // Currently there are stalling problems when dealing with small
                                // amounts of rows.
                                //
                                Boolean batchingRowSet =
                                        ValueMetaString.convertStringToBoolean(
                                                System.getProperty(Const.HOP_BATCHING_ROWSET));
                                if (StrUtil.equals(rowSetTag, ProcessRowSetEnum.FILE.getCode())) {
                                    rowSet = new ProcessFileRowSet(this);
                                } else if (StrUtil.equals(rowSetTag, ProcessRowSetEnum.DB.getCode())) {
                                    rowSet = new ProcessDbRowSet(this);
                                } else if (batchingRowSet != null && batchingRowSet.booleanValue()) {
                                    rowSet = new BlockingBatchingRowSet(rowSetSize);
                                } else {
                                    rowSet = new BlockingRowSet(rowSetSize);
                                }
                                break;

                            case SingleThreaded:
                                if (StrUtil.equals(rowSetTag, ProcessRowSetEnum.FILE.getCode())) {
                                    rowSet = new ProcessFileRowSet(this);
                                } else if (StrUtil.equals(rowSetTag, ProcessRowSetEnum.DB.getCode())) {
                                    rowSet = new ProcessDbRowSet(this);
                                } else {
                                    rowSet = new QueueRowSet();
                                }
                                break;
                            default:
                                throw new HopException(
                                        "Unhandled pipeline type: " + pipelineMeta.getPipelineType());
                        }

                        switch (dispatchType) {
                            case TYPE_DISP_1_1:
                                rowSet.setThreadNameFromToCopy(
                                        thisTransform.getName(), 0, nextTransform.getName(), 0);
                                break;
                            case TYPE_DISP_1_N:
                                rowSet.setThreadNameFromToCopy(
                                        thisTransform.getName(), 0, nextTransform.getName(), c);
                                break;
                            case TYPE_DISP_N_1:
                                rowSet.setThreadNameFromToCopy(
                                        thisTransform.getName(), c, nextTransform.getName(), 0);
                                break;
                            case TYPE_DISP_N_N:
                                rowSet.setThreadNameFromToCopy(
                                        thisTransform.getName(), c, nextTransform.getName(), c);
                                break;
                            default:
                                break;
                        }
                        rowsets.add(rowSet);
                        if (log.isDetailed()) {
                            log.logDetailed(
                                    BaseMessages.getString(
                                            PKG, "Pipeline.PipelineAllocatedNewRowset", rowSet.toString()));
                        }
                    }
                } else {
                    // For each N source transforms we have M target transforms
                    //
                    // From each input transform we go to all output transforms.
                    // This allows maximum flexibility for re-partitioning,
                    // distribution...
                    for (int s = 0; s < thisCopies; s++) {

                        for (int t = 0; t < nextCopies; t++) {
                            IRowSet rowSet;
                            if (StrUtil.equals(rowSetTag, ProcessRowSetEnum.FILE.getCode())) {
                                rowSet = new ProcessFileRowSet(this);
                            } else if (StrUtil.equals(rowSetTag, ProcessRowSetEnum.DB.getCode())) {
                                rowSet = new ProcessDbRowSet(this);
                            } else {
                                rowSet = new BlockingRowSet(rowSetSize);
                            }
                            rowSet.setThreadNameFromToCopy(
                                    thisTransform.getName(), s, nextTransform.getName(), t);
                            rowsets.add(rowSet);
                            if (log.isDetailed()) {
                                log.logDetailed(
                                        BaseMessages.getString(
                                                PKG, "Pipeline.PipelineAllocatedNewRowset", rowSet.toString()));
                            }
                        }
                    }
                }
            }
            log.logDetailed(
                    BaseMessages.getString(
                            PKG,
                            "Pipeline.Log.AllocatedRowsets",
                            String.valueOf(rowsets.size()),
                            String.valueOf(i),
                            thisTransform.getName())
                            + " ");
        }

        if (log.isDetailed()) {
            log.logDetailed(
                    BaseMessages.getString(PKG, "Pipeline.Log.AllocatingTransformsAndTransformData"));
        }

        // Allocate the transforms & the data...
        //
        for (TransformMeta transformMeta : hopTransforms) {
            String transformid = transformMeta.getTransformPluginId();

            if (log.isDetailed()) {
                log.logDetailed(
                        BaseMessages.getString(
                                PKG,
                                "Pipeline.Log.PipelineIsToAllocateTransform",
                                transformMeta.getName(),
                                transformid));
            }

            // How many copies are launched of this transform?
            int nrCopies = transformMeta.getCopies(this);

            if (log.isDebug()) {
                log.logDebug(
                        BaseMessages.getString(
                                PKG, "Pipeline.Log.TransformHasNumberRowCopies", String.valueOf(nrCopies)));
            }

            // At least run once...
            for (int c = 0; c < nrCopies; c++) {
                // Make sure we haven't started it yet!
                if (!hasTransformStarted(transformMeta.getName(), c)) {
                    TransformMetaDataCombi combi = new TransformMetaDataCombi();

                    combi.transformName = transformMeta.getName();
                    combi.copy = c;

                    // The meta-data
                    combi.transformMeta = transformMeta;
                    combi.meta = transformMeta.getTransform();

                    // Allocate the transform data
                    ITransformData data = combi.meta.createTransformData();
                    combi.data = data;

                    // Allocate the transform
                    ITransform transform =
                            combi.meta.createTransform(transformMeta, data, c, pipelineMeta, this);

                    // Copy the variables of the pipeline to the transform...
                    // don't share. Each copy of the transform has its own variables.
                    //
                    transform.initializeFrom(this);

                    // Pass the metadataProvider to the transforms runtime
                    //
                    transform.setMetadataProvider(metadataProvider);

                    // If the transform is partitioned, set the partitioning ID and some other
                    // things as well...
                    if (transformMeta.isPartitioned()) {
                        List<String> partitionIDs =
                                transformMeta
                                        .getTransformPartitioningMeta()
                                        .getPartitionSchema()
                                        .calculatePartitionIds(this);
                        if (partitionIDs != null && partitionIDs.size() > 0) {
                            transform.setPartitionId(partitionIDs.get(c)); // Pass the partition ID
                            // to the transform
                        }
                    }

                    // Save the transform too
                    combi.transform = transform;

                    // Pass logging level and metrics gathering down to the transform level.
                    // /
                    if (combi.transform instanceof ILoggingObject) {
                        ILogChannel logChannel = combi.transform.getLogChannel();
                        logChannel.setLogLevel(logLevel);
                        logChannel.setGatheringMetrics(log.isGatheringMetrics());
                    }

                    // Add to the bunch...
                    transforms.add(combi);

                    if (log.isDetailed()) {
                        log.logDetailed(
                                BaseMessages.getString(
                                        PKG,
                                        "Pipeline.Log.PipelineHasAllocatedANewTransform",
                                        transformMeta.getName(),
                                        String.valueOf(c)));
                    }
                }
            }
        }

        // Now we need to verify if certain rowsets are not meant to be for error
        // handling...
        // Loop over the transforms and for every transform verify the output rowsets
        // If a rowset is going to a target transform in the transforms error handling
        // metadata, set it to the errorRowSet.
        // The input rowsets are already in place, so the next transform just accepts the
        // rows.
        // Metadata wise we need to do the same trick in PipelineMeta
        //
        for (TransformMetaDataCombi combi : transforms) {
            if (combi.transformMeta.isDoingErrorHandling()) {
                combi.transform.identifyErrorOutput();
            }
        }

        // Now (optionally) write start log record!
        // Make sure we synchronize appropriately to avoid duplicate batch IDs.
        //
        Object syncObject = this;
        if (getParentWorkflow() != null) {
            syncObject = getParentWorkflow(); // parallel execution in a workflow
        }
        if (getParentPipeline() != null) {
            syncObject = getParentPipeline(); // multiple sub-pipelines
        }
        synchronized (syncObject) {
            calculateBatchIdAndDateRange();
            beginProcessing();
        }

        // Set the partition-to-rowset mapping
        //
        for (TransformMetaDataCombi sid : transforms) {
            TransformMeta transformMeta = sid.transformMeta;
            ITransform baseTransform = sid.transform;

            baseTransform.setPartitioned(transformMeta.isPartitioned());

            // Now let's take a look at the source and target relation
            //
            // If this source transform is not partitioned, and the target transform is: it
            // means we need to re-partition the incoming data.
            // If both transforms are partitioned on the same method and schema, we don't
            // need to re-partition
            // If both transforms are partitioned on a different method or schema, we need
            // to re-partition as well.
            // If both transforms are not partitioned, we don't need to re-partition
            //
            boolean isThisPartitioned = transformMeta.isPartitioned();
            PartitionSchema thisPartitionSchema = null;
            if (isThisPartitioned) {
                thisPartitionSchema = transformMeta.getTransformPartitioningMeta().getPartitionSchema();
            }

            boolean isNextPartitioned = false;
            TransformPartitioningMeta nextTransformPartitioningMeta = null;
            PartitionSchema nextPartitionSchema = null;

            List<TransformMeta> nextTransforms = pipelineMeta.findNextTransforms(transformMeta);
            for (TransformMeta nextTransform : nextTransforms) {
                if (nextTransform.isPartitioned()) {
                    isNextPartitioned = true;
                    nextTransformPartitioningMeta = nextTransform.getTransformPartitioningMeta();
                    nextPartitionSchema = nextTransformPartitioningMeta.getPartitionSchema();
                }
            }

            baseTransform.setRepartitioning(TransformPartitioningMeta.PARTITIONING_METHOD_NONE);

            // If the next transform is partitioned differently, set re-partitioning, when
            // running locally.
            //
            if ((!isThisPartitioned && isNextPartitioned)
                    || (isThisPartitioned
                    && isNextPartitioned
                    && !thisPartitionSchema.equals(nextPartitionSchema))) {
                baseTransform.setRepartitioning(nextTransformPartitioningMeta.getMethodType());
            }

            // For partitioning to a set of remove transforms (repartitioning from a master
            // to a set or remote output transforms)
            //
            TransformPartitioningMeta targetTransformPartitioningMeta =
                    baseTransform.getTransformMeta().getTargetTransformPartitioningMeta();
            if (targetTransformPartitioningMeta != null) {
                baseTransform.setRepartitioning(targetTransformPartitioningMeta.getMethodType());
            }
        }

        setPreparing(false);
        setInitializing(true);

        // Do a topology sort... Over 150 transform (copies) things might be slowing down too much.
        //
        if (isSortingTransformsTopologically() && transforms.size() < 150) {
            doTopologySortOfTransforms();
        }

        if (log.isDetailed()) {
            log.logDetailed(
                    BaseMessages.getString(
                            PKG, "Pipeline.Log.InitialisingTransforms", String.valueOf(transforms.size())));
        }

        TransformInitThread[] initThreads = new TransformInitThread[transforms.size()];
        Thread[] threads = new Thread[transforms.size()];

        // Initialize all the threads...
        //
        List<Future<?>> futures = Lists.newArrayList();
        for (int i = 0; i < transforms.size(); i++) {
            final TransformMetaDataCombi sid = transforms.get(i);

            // Do the init code in the background!
            // Init all transforms at once, but ALL transforms need to finish before we can
            // continue properly!
            //
            initThreads[i] = new TransformInitThread(sid, this, log);

            // Put it in a separate thread!
            //
            threads[i] = new Thread(initThreads[i]);
            threads[i].setName(
                    "init of " + sid.transformName + "." + sid.copy + " (" + threads[i].getName() + ")");

            ExtensionPointHandler.callExtensionPoint(
                    log, this, HopExtensionPoint.TransformBeforeInitialize.id, initThreads[i]);
            if (ObjectUtil.isNull(initTaskExecutor)) {
                initThreads[i].run();
            } else {
                Future<?> submit = initTaskExecutor.submit(initThreads[i]);
                futures.add(submit);
            }
//            threads[i].start();
        }

        if (ObjectUtil.isNotNull(initTaskExecutor)) {
            for (int i = 0; i < futures.size(); i++) {
                try {
                    Future<?> o = futures.get(i);
                    o.get(60, TimeUnit.SECONDS);
                    ExtensionPointHandler.callExtensionPoint(
                            log, this, HopExtensionPoint.TransformAfterInitialize.id, initThreads[i]);
                } catch (Exception ex) {
                    log.logError("Error with init thread: " + ex.getMessage(), ex.getMessage());
                    log.logError(Const.getStackTracker(ex));
                }
            }
        }
//        for (int i = 0; i < threads.length; i++) {
//            try {
//                threads[i].join();
//                ExtensionPointHandler.callExtensionPoint(
//                        log, this, HopExtensionPoint.TransformAfterInitialize.id, initThreads[i]);
//            } catch (Exception ex) {
//                log.logError("Error with init thread: " + ex.getMessage(), ex.getMessage());
//                log.logError(Const.getStackTracker(ex));
//            }
//        }

        setInitializing(false);
        boolean ok = true;

        // All transform are initialized now: see if there was one that didn't do it
        // correctly!
        //
        for (TransformInitThread thread : initThreads) {
            TransformMetaDataCombi combi = thread.getCombi();
            if (!thread.isOk()) {
                log.logError(
                        BaseMessages.getString(
                                PKG, "Pipeline.Log.TransformFailedToInit", combi.transformName + "." + combi.copy));
                combi.data.setStatus(EngineComponent.ComponentExecutionStatus.STATUS_STOPPED);
                ok = false;
            } else {
                combi.data.setStatus(EngineComponent.ComponentExecutionStatus.STATUS_IDLE);
                if (log.isDetailed()) {
                    log.logDetailed(
                            BaseMessages.getString(
                                    PKG,
                                    "Pipeline.Log.TransformInitialized",
                                    combi.transformName + "." + combi.copy));
                }
            }
        }

        if (!ok) {
            // Halt the other threads as well, signal end-of-the line to the outside
            // world...
            // Also explicitly call dispose() to clean up resources opened during
            // init()
            //
            for (TransformInitThread initThread : initThreads) {
                TransformMetaDataCombi combi = initThread.getCombi();

                // Dispose will overwrite the status, but we set it back right after
                // this.
                combi.transform.dispose();

                if (initThread.isOk()) {
                    combi.data.setStatus(EngineComponent.ComponentExecutionStatus.STATUS_HALTED);
                } else {
                    combi.data.setStatus(EngineComponent.ComponentExecutionStatus.STATUS_STOPPED);
                }
            }

            // Just for safety, fire the pipeline finished listeners...
            try {
                fireExecutionFinishedListeners();
            } catch (HopException e) {
                // listeners produces errors
                log.logError(BaseMessages.getString(PKG, "Pipeline.FinishListeners.Exception"));
                // we will not pass this exception up to prepareExecuton() entry point.
            } finally {
                // Flag the pipeline as finished even if exception was thrown
                setFinished(true);
            }

            // Pass along the log during preview. Otherwise it becomes hard to see
            // what went wrong.
            //
            if (isPreview()) {
                String logText = HopLogStore.getAppender().getBuffer(getLogChannelId(), true).toString();
                throw new HopException(
                        BaseMessages.getString(PKG, "Pipeline.Log.FailToInitializeAtLeastOneTransform")
                                + Const.CR
                                + logText);
            } else {
                throw new HopException(
                        BaseMessages.getString(PKG, "Pipeline.Log.FailToInitializeAtLeastOneTransform")
                                + Const.CR);
            }
        }

        log.snap(Metrics.METRIC_PIPELINE_INIT_STOP);

        setReadyToStart(true);
    }


    /**
     * Starts the threads prepared by prepareThreads(). Before you start the threads, you can add
     * RowListeners to them.
     *
     * <p>Compared to the stock Hop implementation, this override additionally: pushes step-status
     * snapshots to the process log channel ({@link #callStepStatus()}), injects per-component log
     * levels, resets private parent-class state via reflection (the fields are not accessible from
     * this subclass), and runs the transform threads on the shared {@code executeThreadTaskExecutor}
     * — optionally bounded by a per-component timeout variable — instead of spawning one raw
     * {@code Thread} per transform.
     *
     * @throws HopException if there is a communication error with a remote output socket.
     */
    @Override
    public void startThreads() throws HopException {
        // Now prepare to start all the threads...
        //
        callStepStatus();

        // Initialize per-transform logging levels before any transform starts producing output.
        injectLogLevel();

        // nrOfFinishedTransforms is private in the Hop parent class, so reset it reflectively.
//        nrOfFinishedTransforms = 0;
        ReflectUtil.setFieldValue(this, "nrOfFinishedTransforms", 0);

        ExtensionPointHandler.callExtensionPoint(
                log, this, HopExtensionPoint.PipelineStartThreads.id, this);


//        addExecutionStartedListener(PipelineExecutionListener.INSTANCE);
//        addExecutionStoppedListener(PipelineExecutionListener.INSTANCE);

        super.fireExecutionStartedListeners();

        // Notify our own (non-Hop) pipeline listeners that execution has started.
        for (PipelineListener pipelineListener : pipelineListeners) {
            pipelineListener.started(this);
        }


        List<TransformMetaDataCombi> transforms = getTransforms();
        for (int i = 0; i < transforms.size(); i++) {
            final TransformMetaDataCombi sid = transforms.get(i);
            sid.transform.markStart();
            sid.transform.initBeforeStart();

            // Push an initial status snapshot for this transform.
            callStepStatus(sid.transform);


            // also attach a listener to detect when we're done...
            //
            ITransformFinishedListener finishedListener =
                    (pipeline, transformMeta, transform) -> {
                        synchronized (ProcessTrans.this) {
//                            nrOfFinishedTransforms++;
                            // The counter lives as a private field in the parent class; read,
                            // increment and write it back via reflection while holding the lock.
                            int nrOfFinishedTransforms = (int) ReflectUtil.getFieldValue(this, "nrOfFinishedTransforms");
                            ReflectUtil.setFieldValue(this, "nrOfFinishedTransforms",
                                    ++nrOfFinishedTransforms
                            );

                            // Last transform finished: flip the pipeline into its terminal state.
                            if (nrOfFinishedTransforms >= transforms.size()) {
                                // Set the finished flag
                                //
                                setFinished(true);

                                // Grab the performance statistics one last time (if enabled)
                                //
                                addTransformPerformanceSnapShot();

                                // We're really done now.
                                //
                                setExecutionEndDate(new Date());
//                                callStepStatus(transform);
                                try {
                                    fireExecutionFinishedListeners();
                                } catch (Exception e) {
                                    // A failing finished-listener counts as a transform error.
                                    transform.setErrors(transform.getErrors() + 1L);
                                    log.logError(
                                            getName()
                                                    + " : "
                                                    + BaseMessages.getString(
                                                    PKG, "Pipeline.Log.UnexpectedErrorAtPipelineEnd"),
                                            e);
                                }

                                log.logBasic(
                                        "Execution finished on a local pipeline engine with run configuration '"
                                                + pipelineRunConfiguration.getName()
                                                + "'");
                            }

                            // If a transform fails with an error, we want to kill/stop the others
                            // too...
                            //
                            if (transform.getErrors() > 0) {

                                log.logMinimal(BaseMessages.getString(PKG, "Pipeline.Log.PipelineDetectedErrors"));
                                log.logMinimal(
                                        BaseMessages.getString(
                                                PKG, "Pipeline.Log.PipelineIsKillingTheOtherTransforms"));

                                // killAllNoWait() is private in the parent class; invoke reflectively.
                                ReflectUtil.invoke(this, "killAllNoWait");
//                                killAllNoWait();
                            }
//                            callStepStatus(transform);
                        }
                    };

            // Make sure this is called first!
            //
            if (sid.transform instanceof BaseTransform) {
                ((BaseTransform) sid.transform).getTransformFinishedListeners().add(0, finishedListener);
            } else {
                sid.transform.addTransformFinishedListener(finishedListener);
            }
        }

        if (pipelineMeta.isCapturingTransformPerformanceSnapShots()) {
            // Performance-snapshot bookkeeping also lives in private parent fields → reflection.
//            transformPerformanceSnapshotSeqNr = new AtomicInteger(0);
            ReflectUtil.setFieldValue(this, "transformPerformanceSnapshotSeqNr", new AtomicInteger(0));
//            transformPerformanceSnapShots = new ConcurrentHashMap<>();
            setTransformPerformanceSnapShots(new ConcurrentHashMap<>());
            // Calculate the maximum number of snapshots to be kept in memory
            //
            String limitString = resolve(pipelineMeta.getTransformPerformanceCapturingSizeLimit());
            if (Utils.isEmpty(limitString)) {
                limitString = EnvUtil.getSystemProperty(Const.HOP_TRANSFORM_PERFORMANCE_SNAPSHOT_LIMIT);
            }
//            transformPerformanceSnapshotSizeLimit = Const.toInt(limitString, 0);
            ReflectUtil.setFieldValue(this, "transformPerformanceSnapshotSizeLimit", Const.toInt(limitString, 0));

            // Set a timer to collect the performance data from the running threads...
            //

            Timer transformPerformanceSnapShotTimer =
                    new Timer("transformPerformanceSnapShot Timer: " + pipelineMeta.getName());
            ReflectUtil.setFieldValue(this, "transformPerformanceSnapShotTimer", transformPerformanceSnapShotTimer);

            TimerTask timerTask =
                    new TimerTask() {
                        @Override
                        public void run() {
                            if (!isFinished()) {
                                addTransformPerformanceSnapShot();
                            }
                        }
                    };
            transformPerformanceSnapShotTimer.schedule(
                    timerTask, 100, pipelineMeta.getTransformPerformanceCapturingDelay());
        }

        // Now start a thread to monitor the running pipeline...
        //
        setFinished(false);
        setPaused(false);
        setStopped(false);

        ArrayBlockingQueue<Object> pipelineWaitUntilFinishedBlockingQueue = new ArrayBlockingQueue<>(10);
        ReflectUtil.setFieldValue(this, "pipelineWaitUntilFinishedBlockingQueue", pipelineWaitUntilFinishedBlockingQueue);
        // Do all sorts of nifty things at the end of the pipeline execution
        ///
        IExecutionFinishedListener<IPipelineEngine<PipelineMeta>> executionListener =
                pipeline -> {
                    try {
                        ExtensionPointHandler.callExtensionPoint(
                                log, this, HopExtensionPoint.PipelineFinish.id, pipeline);
                    } catch (HopException e) {
                        throw new RuntimeException("Error calling extension point at end of pipeline", e);
                    }

                    // First of all, stop the performance snapshot timer if there is is
                    // one...
                    //
                    Timer transformPerformanceSnapShotTimer = (Timer) ReflectUtil.getFieldValue(this, "transformPerformanceSnapShotTimer");

                    if (pipelineMeta.isCapturingTransformPerformanceSnapShots()
                            && transformPerformanceSnapShotTimer != null) {
                        transformPerformanceSnapShotTimer.cancel();
                    }

                    setFinished(true);
                    setRunning(false); // no longer running

                    log.snap(Metrics.METRIC_PIPELINE_EXECUTION_STOP);

                    // release unused vfs connections
                    HopVfs.freeUnusedResources();
//                    callStepStatus();
                };
        // This should always be done first so that the other listeners achieve a clean state to start
        // from (setFinished and
        // so on)
        //
        addExecutionFinishedListener(executionListener);
//        addExecutionFinishedListener(PipelineExecutionListener.INSTANCE);

        setRunning(true);

        switch (pipelineMeta.getPipelineType()) {
            case Normal:

                // Now start all the threads...
                //
                // Run threads are submitted to the shared executor instead of raw Threads,
                // so we can optionally bound each transform's runtime below.
                Map<ITransform, Future<?>> futures = Maps.newHashMap();
                for (final TransformMetaDataCombi combi : transforms) {
                    ProcessRunThread runThread = new ProcessRunThread(combi);
//                    RunThread runThread = new RunThread(combi);
//                    Thread thread = new Thread(runThread);
//                    thread.setName(getName() + " - " + combi.transformName);
                    ExtensionPointHandler.callExtensionPoint(
                            log, this, HopExtensionPoint.TransformBeforeStart.id, combi);
                    // Call an extension point at the end of the transform
                    //
                    combi.transform.addTransformFinishedListener(
                            (pipeline, transformMeta, transform) -> {
                                try {
                                    ExtensionPointHandler.callExtensionPoint(
                                            log, this, HopExtensionPoint.TransformFinished.id, combi);
                                } catch (HopException e) {
                                    throw new RuntimeException(
                                            "Unexpected error in calling extension point upon transform finish", e);
                                }
                            });

                    Future<?> submit = executeThreadTaskExecutor.submit(runThread);
                    futures.put(combi.transform, submit);
                }
                // Per-transform timeout: the variable on the transform wins, otherwise the
                // pipeline-level compTimeout; the literal string "null" means "no timeout".
                // NOTE(review): compTimeout.toString() NPEs if compTimeout is null — the
                // "null"-string check below suggests a string sentinel is expected; confirm.
                for (Map.Entry<ITransform, Future<?>> si : futures.entrySet()) {
                    ITransform key = si.getKey();
                    Future<?> value = si.getValue();

                    String variable = key.getVariable(ProcessCompDescriptorConstants.KEY_COMP_TIME_OUT);
                    if (StrUtil.isBlank(variable)) {
                        variable = compTimeout.toString();
                    }
                    if (StrUtil.isNotBlank(variable) && !StrUtil.equals(variable, "null")) {
                        try {
                            // A timeout or execution failure here aborts startThreads().
                            value.get(Long.parseLong(variable), TimeUnit.SECONDS);
                        } catch (Throwable e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
                break;

            case SingleThreaded:
                // Single-threaded mode executes synchronously on the calling thread.
                ProcessRunSingleThreaded runThread = new ProcessRunSingleThreaded(transforms, pipelineMeta, this);
                runThread.run();
                executeThreadTaskExecutor.addTrackFuture(Thread.currentThread());
                // Don't do anything, this needs to be handled by the pipeline
                // executor!
                //
                break;
            default:
                break;
        }

        ExtensionPointHandler.callExtensionPoint(log, this, HopExtensionPoint.PipelineStart.id, this);

        // If there are no transforms we don't catch the number of active transforms dropping to zero
        // So we fire the execution finished listeners here.
        //
        if (transforms.isEmpty()) {
            fireExecutionFinishedListeners();
        }

        if (log.isDetailed()) {
            log.logDetailed(
                    BaseMessages.getString(
                            PKG,
                            "Pipeline.Log.PipelineHasAllocated",
                            String.valueOf(transforms.size()),
                            String.valueOf(rowsets.size())));
        }
    }


    /**
     * Publishes an execution-status snapshot for a single transform, but only when the
     * observation flag for this pipeline's tid is active on the logging event listener.
     *
     * @param stepInterface the transform whose status should be reported
     */
    public void callStepStatus(ITransform stepInterface) {

        if (ObjectUtil.isNull(ProcessLoggingEventListener.INSTANCE)) {
            return;
        }
        // Status is only collected for observed runs (e.g. test runs, and compensation runs
        // which also need their execution state displayed).
        boolean observed = ProcessLoggingEventListener.INSTANCE.hasObservationFlag(this.getTid());
        if (BooleanUtil.isFalse(observed)) {
            return;
        }
        sendLogStatus(buildStepStatus(Lists.newArrayList(stepInterface)));
    }


    /**
     * Periodically invoked to publish an execution-status snapshot for every transform in
     * the pipeline via the process log channel. Snapshots are only sent when observation
     * is enabled for this run.
     */
    public void callStepStatus() {

        if (ObjectUtil.isNull(ProcessLoggingEventListener.INSTANCE)) {
            return;
        }

        boolean observed = ProcessLoggingEventListener.INSTANCE.hasObservationFlag(this.getTid());
        // An explicitly configured instance flag, when present, overrides the listener's decision.
        if (ObjectUtil.isNotNull(this.printStepStatusFlag)) {
            observed = this.printStepStatusFlag;
        }
        // Test runs need status collected on a schedule; bail out otherwise.
        if (BooleanUtil.isFalse(observed)) {
            return;
        }
        List<ITransform> running = getTransforms().stream()
                .map(combi -> combi.transform)
                .collect(Collectors.toList());
        sendLogStatus(buildStepStatus(running));
    }

    /**
     * Forwards the step-status snapshots to the log channel; snapshots are only meaningful
     * when our custom {@link ProcessLogChannel} is in use, otherwise they are dropped.
     *
     * @param executorSteps status entries, one per transform
     */
    private void sendLogStatus(List<ProcessExecutorCompInfo> executorSteps) {
        if (log instanceof ProcessLogChannel channel) {
            channel.logStatus(executorSteps);
        }
    }

    /**
     * Builds a status snapshot (row counts, speed, errors, ...) for each of the given
     * transforms so it can be pushed to the process log channel.
     *
     * <p>Fix: the original builder chain called {@code .linesOutput(...)} twice with the same
     * value; the duplicate call is removed.
     * NOTE(review): the duplicate may have been intended as {@code linesWritten} — confirm
     * against the fields of {@code ProcessExecutorCompInfo}.
     *
     * @param steps transforms to snapshot; may be empty, never null
     * @return one {@code ProcessExecutorCompInfo} per transform, in input order
     */
    private List<ProcessExecutorCompInfo> buildStepStatus(List<ITransform> steps) {
        List<ProcessExecutorCompInfo> executorSteps = new ArrayList<>(steps.size());

        for (ITransform step : steps) {
            // TransformStatus aggregates the live counters of the running transform.
            TransformStatus stepStatus = new TransformStatus(step);

            ProcessExecutorCompInfo executorStep = ProcessExecutorCompInfo.builder()
                    .stepName(step.getTransformName())
                    .copy(stepStatus.getCopy())
                    .priority(stepStatus.getPriority())
                    .linesInput(stepStatus.getLinesInput())
                    .linesOutput(stepStatus.getLinesOutput())
                    .linesRead(stepStatus.getLinesRead())
                    .linesUpdated(stepStatus.getLinesUpdated())
                    .linesRejected(stepStatus.getLinesRejected())
                    .stepExecutionStatus(step.getStatus().name())
                    .statusDescription(stepStatus.getStatusDescription())
                    .seconds(stepStatus.getSeconds())
                    .speed(stepStatus.getSpeed())
                    .errors(stepStatus.getErrors())
                    .build();

            executorSteps.add(executorStep);
        }

        return executorSteps;
    }

    /**
     * Blocks until all run threads have finished, then emits a final step-status snapshot
     * and notifies every registered {@link PipelineListener} of completion.
     */
    @SneakyThrows
    @Override
    public void waitUntilFinished() {
        super.waitUntilFinished();
        // One last snapshot so observers see the terminal state of every transform.
        callStepStatus();
        for (PipelineListener listener : pipelineListeners) {
            listener.finished(this);
        }
    }

    /**
     * Propagates this pipeline's effective log level into every transform whose meta is an
     * {@link AbstractProcessCompDescriptor}, updating both the descriptor's own log and the
     * transform's log channel. Transforms with other meta types are left untouched.
     *
     * <p>Fix: removed the unused local {@code data} ({@code transform.data} was read but never
     * used) and the dead commented-out line at the end.
     */
    protected void injectLogLevel() {
        List<TransformMetaDataCombi> transforms = getTransforms();
        if (CollUtil.isEmpty(transforms)) {
            return;
        }
        LogLevel pipelineLevel = this.getLogLevel();

        for (TransformMetaDataCombi combi : transforms) {
            ITransformMeta meta = combi.transformMeta.getTransform();
            // Only our own component descriptors know how to derive a per-component level.
            if (!(meta instanceof AbstractProcessCompDescriptor<?, ?> descriptor)) {
                continue;
            }
            LogLevel effective = descriptor.getLogLevel(pipelineLevel);
            descriptor.getLog().setLogLevel(effective);
            combi.transform.getLogChannel().setLogLevel(effective);
        }
    }


    /**
     * Snapshots every variable currently visible to this pipeline.
     *
     * @return mutable map of variable name to its resolved value
     */
    public Map<String, Object> getVariableMap() {
        Map<String, Object> variables = Maps.newHashMap();
        for (String name : getVariableNames()) {
            variables.put(name, getVariable(name));
        }
        return variables;
    }

    /**
     * @return {@code true} when this pipeline is executed as a child of another
     *         process/workflow (i.e. it has a parent), {@code false} otherwise
     */
    public Boolean isChildProcessFlag() {
        return this.getParent() != null;
    }

    /**
     * Collects every throwable recorded by the pipeline's transforms.
     *
     * @return all throwables gathered from {@link AbstractProcessComp} transforms,
     *         or an empty list when there are none
     */
    public List<Throwable> getThrowableList() {
        List<TransformMetaDataCombi> combis = this.getTransforms();
        if (CollUtil.isEmpty(combis)) {
            return Collections.emptyList();
        }
        List<Throwable> collected = Lists.newArrayList();
        for (TransformMetaDataCombi combi : combis) {
            // Only our own component base class tracks per-transform throwables.
            if (!(combi.transform instanceof AbstractProcessComp<?, ?> comp)) {
                continue;
            }
            List<Throwable> errors = comp.getThrowables();
            if (CollUtil.isNotEmpty(errors)) {
                collected.addAll(errors);
            }
        }
        return collected;
    }

    /**
     * Renders every recorded throwable as a newline-joined stack-trace string.
     *
     * @return joined stack traces, or {@code null} when no throwables were recorded
     */
    public String getErrorText() {
        List<Throwable> throwables = getThrowableList();
        if (CollUtil.isEmpty(throwables)) {
            return null;
        }
        return throwables.stream()
                .map(ExceptionUtil::stacktraceToString)
                .collect(Collectors.joining("\n"));
    }

    /**
     * Detaches every execution-started, execution-finished and execution-stopped listener
     * currently registered on this pipeline engine.
     */
    public void removeAllListener() {
        getExecutionStartedListeners().clear();
        getExecutionFinishedListeners().clear();
        getExecutionStoppedListeners().clear();
    }
}

