package com.bindatax.core.task;

import com.bindatax.core.async.executor.Async;
import com.bindatax.core.async.wrapper.WorkerWrapper;
import com.bindatax.core.database.operate.DatabaseOperateInterface;
import com.bindatax.core.dspool.DataTaskConnManager;
import com.bindatax.core.log.LogOutInterface;
import com.bindatax.core.log.LogOutManager;
import com.bindatax.core.meta.TaskMeta;
import com.bindatax.core.statistics.ExecuteStatistics;
import com.bindatax.core.model.ValueMetaData;
import com.bindatax.core.model.WriteDateConResult;
import com.bindatax.core.sqlformatter.SqlFormatter;
import com.bindatax.core.statistics.StatisticsData;
import com.bindatax.core.task.event.EventScope;
import com.bindatax.core.task.event.EventType;
import com.bindatax.core.task.event.ListenEventInterface;
import com.bindatax.core.task.row.EventRowUtil;
import com.bindatax.core.task.row.EventValueData;
import com.bindatax.core.task.segment.SegmentData;
import com.bindatax.core.task.segment.SegmentStatus;
import com.bindatax.core.task.segment.SegmentWork;
import com.bindatax.core.task.spi.TaskSpiManager;
import com.bindatax.core.util.DatabaseUtil;
import com.bindatax.core.util.StringUtils;
import org.pentaho.di.core.database.BaseDatabaseMeta;
import org.pentaho.di.core.row.ValueMetaInterface;

import java.sql.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantLock;

/**
 * 数据处理
 *
 * @author ChenZhenQin
 * @version V1.0.0
 * @date 16:07 2022/2/23
 */
public class DataTask implements Callable<DataTask> {

    private final static LogOutInterface LOG = LogOutManager.get();

    // Name of the thread running this task; captured in start().
    private String taskName;

    private long queryTotal = 0;// total rows returned by the source query (only counted when taskMeta.isQueryTotal())
    private String[] targetIds;// target key column names parsed from taskMeta.getTargetKeys()
    private int dataQueueSize;// capacity of the queue feeding the segment workers (batch size * 1.3 * segment count)

    private List<SegmentWork> segmentWorkList;

    // Target-table row count before the write phase
    private long beforeInputNum = 0;
    // Target-table row count after the write phase
    private long afterInputNum = 0;

    // Column metadata (source result set and target table) for this task's SQL
    private ColumnMetaData columnMetaData = new ColumnMetaData();
    private String sourceConnKey;// pool key of the source connection pool
    private String targetConnKey;// pool key of the target connection pool
    private final Map<EventType,List<ListenEventInterface> > listenEventMap = new HashMap<>();

    // Set externally to ask the task to stop fetching/writing data
    private volatile boolean notifyTransStop = false;
    private Exception taskError;// last failure, recorded in start()
    private final ReentrantLock STATISTICS_LOCK = new ReentrantLock();

    private final StatisticsData taskStatisticsData = StatisticsData.getLocal();
    private final ExecuteStatistics taskPreviousTaskStatistics = taskStatisticsData.getPreviousTaskStatistics();// statistics snapshot of the previous task
    private final ExecuteStatistics taskStatistics = taskStatisticsData.getTaskStatistics();// live statistics of this task

    private DatabaseOperateInterface targetDatabaseOperate;
    private TaskMeta taskMeta;
    private String runTimeSql;// the query SQL actually executed (copied from taskMeta in initTask())

    private Object parent;// parent context, opaque to this class

    /**
     * Creates a task without a parent context.
     *
     * @param taskMeta the task configuration
     */
    public DataTask(TaskMeta taskMeta) {
        this(taskMeta,null);
    }
    /**
     * Creates a task.
     *
     * @param taskMeta the task configuration
     * @param parent   optional parent context (may be null)
     */
    public DataTask(TaskMeta taskMeta,Object parent) {
        this.taskMeta = taskMeta;
        this.parent = parent;
    }
    /**
     * {@link Callable} entry point: runs {@link #start()} and returns this
     * task so the submitter can inspect its final state (e.g. statistics,
     * recorded error).
     */
    @Override
    public DataTask call() throws Exception {
        start();
        return this;
    }

    /**
     * Runs the full task pipeline: init, build connection pools, query the
     * source and read its metadata, resolve the target database operate,
     * read target metadata, map common columns, then write the data.
     * Listener events fire before/after each phase, plus
     * TASK_FINISH_SUCCESS / TASK_FINISH_FAIL. Failures are stored in
     * {@code taskError}, logged and rethrown.
     *
     * @throws Exception when any phase fails
     **/
    public void start() throws Exception {
        taskName = Thread.currentThread().getName();
        long start = System.currentTimeMillis();
        LOG.debug("开始执行任务[" + this + "]");
        try {
            exeEvent(EventType.TASK_INIT_BEFORE);
            initTask();
            exeEvent(EventType.TASK_INIT_AFTER);

            exeEvent(EventType.TASK_RUNNING_CREATE_CONN_BEFORE);
            buildConnection();// build the source/target connection pools
            exeEvent(EventType.TASK_RUNNING_CREATE_CONN_AFTER);

            exeEvent(EventType.TASK_RUNNING_SOURCE_QUERY_BEFORE);
            WriteDateConResult writeDateConResult = initSourceData();// run the query and capture source metadata
            exeEvent(EventType.TASK_RUNNING_SOURCE_QUERY_AFTER);

            buildTargetDatabaseOperate();// resolve the target database operate instance

            exeEvent(EventType.TASK_RUNNING_TARGET_META_BEFORE);
            initTargetData();// read target table metadata
            exeEvent(EventType.TASK_RUNNING_TARGET_META_AFTER);

            exeEvent(EventType.TASK_RUNNING_MAPPING_COLUMN_BEFORE);
            buildCommonColumns();// map source columns onto target columns
            exeEvent(EventType.TASK_RUNNING_MAPPING_COLUMN_AFTER);

            exeEvent(EventType.TASK_RUNNING_WRITE_DB_BEFORE);
            write(writeDateConResult);// write the data
            if (taskMeta.isOpenTotalStatistics()) {
                // NOTE(review): the int cast truncates deltas beyond Integer.MAX_VALUE — confirm acceptable.
                taskStatistics.setUpdate((int) (afterInputNum - beforeInputNum));
            } else {
                // Presumably repeated on purpose so the warning stands out in the log.
                for (int i = 0; i < 3; i++) {
                    LOG.warn("注意：转换已执行完成，但已关闭入库的更新数统计，则默认为输出数。");
                }
            }
            exeEvent(EventType.TASK_FINISH_SUCCESS);
        } catch (Exception e) {
            taskError = e;
            LOG.error("任务执行失败！", e);
            exeEvent(EventType.TASK_FINISH_FAIL);
            throw e;
        } finally {
            LOG.info("任务[" + this + "]执行结束,用时：" + (System.currentTimeMillis() - start) / 1000 + "秒");
            LOG.info(taskStatistics.descStatisticsInfo());
        }
    }

    /**
     * Prepares per-run state: sizes the shared data queue, resets the segment
     * worker list, snapshots the query SQL, registers a listener list for
     * every event type and attaches any SPI-provided service events.
     *
     * @throws Exception when attaching a service event fails
     */
    private void initTask() throws Exception {
        // The shared queue holds one batch per segment worker plus 30% headroom.
        int batchSize = taskMeta.getWriteSegmentBatchSize();
        int segmentNumber = taskMeta.getWriteSegmentNumber();
        this.dataQueueSize = (int) (batchSize * 1.3) * segmentNumber;
        this.segmentWorkList = new ArrayList<>(segmentNumber);
        this.runTimeSql = taskMeta.getQuerySql();
        // Every event type starts with an empty listener list.
        for (EventType eventType : EventType.values()) {
            listenEventMap.put(eventType, new ArrayList<>());
        }
        // Attach shared service events discovered through SPI, if any.
        for (ListenEventInterface serviceEvent : TaskSpiManager.getTaskEventList()) {
            addTaskEvent(serviceEvent, true);
        }
    }

    /**
     * Builds the source and target connection pools described by the task
     * metadata. A pool whose key is already set is not rebuilt, so calling
     * this method again is a no-op for existing pools.
     *
     * <p>Refactor: the previously duplicated build/try/log sequence for the
     * two pools is factored into {@link #buildPool}; all log output is
     * unchanged.
     *
     * @throws Exception when either pool cannot be created
     **/
    public void buildConnection() throws Exception {
        if (null == sourceConnKey) {
            sourceConnKey = buildPool("源库", "连接池ConnKey："
                    , taskMeta.getSourceUrl()
                    , taskMeta.getSourceUser()
                    , taskMeta.getSourcePwd()
                    , taskMeta.getSourceDriver()
                    , taskMeta.getSourceValidationQuery());
        }
        LOG.info("成功构建 源库连接池。");

        if (null == targetConnKey) {
            targetConnKey = buildPool("目标库", "目标库ConnKey："
                    , taskMeta.getTargetUrl()
                    , taskMeta.getTargetUser()
                    , taskMeta.getTargetPwd()
                    , taskMeta.getTargetDriver()
                    , taskMeta.getTargetValidationQuery());
        }
        LOG.info("成功构建 目标库连接池。");
    }

    /**
     * Creates one connection pool and returns its pool key.
     *
     * @param label           log label of the pool being built (source/target)
     * @param keyLogPrefix    log prefix used when printing the created pool key
     * @param url             JDBC url
     * @param user            database user
     * @param pwd             database password
     * @param driver          JDBC driver class name
     * @param validationQuery pool validation query
     * @return the key of the newly built pool
     * @throws Exception when the pool cannot be created (logged before rethrow)
     */
    private String buildPool(String label, String keyLogPrefix, String url, String user
            , String pwd, String driver, String validationQuery) throws Exception {
        try {
            LOG.info("开始构建 " + label + "连接池。");
            String connKey = DataTaskConnManager.buildConnection(url, user, pwd, driver, validationQuery);
            LOG.info(keyLogPrefix + connKey);
            return connKey;
        } catch (Exception e) {
            LOG.error("无法构建 " + label + "连接池。", e);
            throw e;
        }
    }

    /**
     * Parses the configured target key column list (comma separated) into
     * {@code targetIds}.
     *
     * <p>Fix: entries are now trimmed and blank entries dropped, so a list
     * configured as {@code "a, b"} no longer produces the key {@code " b"},
     * which would fail the upper-cased metadata lookup in initTargetData().
     * When nothing usable is configured, {@code targetIds} stays null and
     * primary-key auto-detection can take over.
     */
    private void readIds() {
        String targetIdName = taskMeta.getTargetKeys();// configured key column list, may be null/empty
        if (targetIdName != null && targetIdName.length() > 0) {
            List<String> ids = new ArrayList<>();
            for (String id : targetIdName.split(",")) {
                String trimmed = id.trim();
                if (!trimmed.isEmpty()) {
                    ids.add(trimmed);
                }
            }
            if (!ids.isEmpty()) {
                targetIds = ids.toArray(new String[0]);
            }
        }
    }

    /**
     * Initializes the source side: executes the configured query, optionally
     * counts the result rows, and captures the result-set column metadata
     * into {@code columnMetaData}. On success the connection, statement and
     * result set are intentionally left OPEN and handed back inside the
     * returned {@link WriteDateConResult}; the write phase closes them.
     * On failure they are closed here before rethrowing.
     *
     * @return holder carrying the still-open source connection/statement/result set
     * @throws Exception when the query fails or yields no column metadata
     **/
    public WriteDateConResult initSourceData() throws Exception {
        WriteDateConResult writeDateConResult = new WriteDateConResult();
        LOG.info(LogOutManager.CR + "初始化源数据库连接环境，获取动态SQL的字段信息。" + LogOutManager.CR
                + "本次执行查询数据的SQL语句为（已格式化SQL输出，不是执行格式）：" + LogOutManager.CR + SqlFormatter.format(this.runTimeSql) + LogOutManager.CR);
        Connection sourceConn = null;
        PreparedStatement sourceStatement = null;
        ResultSet sourceResultSet = null;
        try {
            // Execute the dynamic query. The result set is scroll-insensitive so
            // the optional row count below can jump to the end and rewind.
            LOG.info("开始执行动态SQL执行查询。");
            sourceConn = DataTaskConnManager.getConnection(sourceConnKey);
            sourceStatement = sourceConn.prepareStatement(this.runTimeSql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            sourceStatement.setFetchSize(taskMeta.getWriteSegmentBatchSize());
            sourceResultSet = sourceStatement.executeQuery();

            if(taskMeta.isQueryTotal()){
                // Count rows by moving to the last row, then rewind for the real read.
                sourceResultSet.last();
                queryTotal = sourceResultSet.getRow();
                sourceResultSet.beforeFirst();
                LOG.info("执行SQL查询完成！查询出来的条数为 ： " + queryTotal + "条");
            }else {
                LOG.debug("已关闭查询总数。");
            }

            ConcurrentHashMap<String, ValueMetaData> sourceValueMetaDataMap = columnMetaData.getSourceColumnNameMap();
            sourceValueMetaDataMap.clear();
            ResultSetMetaData sourceMetaData = sourceResultSet.getMetaData();
            LOG.info("开始获取查询SQL执行结果集的所有字段属性：" + LogOutManager.CR + this.runTimeSql);
            // NOTE(review): the Oracle plugin id is hard-coded for the type mapping even
            // when the source is a different database — confirm this is intentional.
            List<ValueMetaData> valueMetaDataList = DatabaseUtil.getValueFromSQLTypeList(BaseDatabaseMeta.DATABASE_ORACLE_PLUGINID
                    , sourceMetaData);
            int size = valueMetaDataList.size();
            columnMetaData.setSourceColumnNameList(valueMetaDataList);
            if (size > 0) {
                StringBuilder sb = new StringBuilder();
                sb.append("共查询到源结果集有").append(size).append("个字段属性数据：").append(LogOutManager.CR);
                for (int i = 0; i < size; i++) {
                    ValueMetaData valueMetaData = valueMetaDataList.get(i);
                    ValueMetaInterface vm = valueMetaData.getValueMetaInterface();
                    // Map keys are the upper-cased column names.
                    sourceValueMetaDataMap.put(vm.getName().toUpperCase(), valueMetaData);
                    sb.append("===》查询结果集 --- 字段名:").append(vm.getName())
                            .append(", 数据库字段类型:").append(vm.getSqlTypeDesc())
                            .append(" , 对应的系统字段类型:").append(vm.getTypeDesc())
                            .append(LogOutManager.CR);
                }
                LOG.info(sb.toString());
            } else {
                throw new Exception("无法获取到查询SQL的字段信息！" + LogOutManager.CR + this.runTimeSql);
            }
            writeDateConResult.setSourceConn(sourceConn);
            writeDateConResult.setSourceStatement(sourceStatement);
            writeDateConResult.setSourceResultSet(sourceResultSet);
        } catch (Exception e) {
            LOG.error("执行编译SQL获取源字段时失败！请检查相关占位符是否正确等", e);
            DataTaskConnManager.close(sourceResultSet, sourceStatement, sourceConn);
            throw e;
        }
        return writeDateConResult;
    }

    /**
     * Resolves the target database operate implementation by matching the
     * JDBC product name/version of the target connection against the
     * SPI-provided candidates; the first match wins.
     *
     * @throws Exception when no implementation recognises the target database
     */
    private void buildTargetDatabaseOperate() throws Exception{
        // Borrow a connection only long enough to read the product metadata.
        Connection conn = null;
        try {
            conn = DataTaskConnManager.getConnection(targetConnKey);
            DatabaseMetaData meta = conn.getMetaData();
            String productName = meta.getDatabaseProductName();
            String productVersion = meta.getDatabaseProductVersion();
            for (DatabaseOperateInterface candidate : TaskSpiManager.getDatabaseOperateList()) {
                if (candidate.checkDatabaseType(productName, productVersion)) {
                    targetDatabaseOperate = candidate;
                    break;
                }
            }
            if (targetDatabaseOperate == null) {
                String err = "目标数据库名称：" + productName + ",目标数据库版本：" + productVersion +
                        "。错误：无法找到目标数据库操作实例！请检查lib文件夹下的[BinDatax-spi-database-]前缀的jar文件";
                throw new Exception(err);
            }
        }finally {
            DataTaskConnManager.close(null,null,conn);
        }
    }

    /**
     * Initializes the target side: reads the target table's column metadata
     * into {@code columnMetaData}, resolves the key columns (configured list,
     * or auto-detected primary keys when allowed), and reorders the target
     * column list so the key columns come first.
     *
     * <p>Fix: the key-name set used for the reorder is now upper-cased and the
     * remaining columns are compared upper-cased as well. Previously the set
     * held the keys in their configured case while metadata names were used
     * as-is, so a key configured in a different case was re-added as a
     * duplicate column (the map lookup itself was already upper-cased).
     *
     * @throws Exception when the metadata cannot be read, yields no columns,
     *                   or a configured key column is missing on the target table
     **/
    public void initTargetData() throws Exception {
        // Target column map, keyed by upper-cased column name.
        ConcurrentHashMap<String, ValueMetaData> targetValueMetaDataMap = columnMetaData.getTargetColumnNameMap();
        targetValueMetaDataMap.clear();
        LOG.info("开始初始化目标库环境，获取目标表字段信息数据。");
        Connection targetConn = null;
        PreparedStatement targetStatement = null;
        ResultSet targetResultSet = null;
        try {
            targetConn = DataTaskConnManager.getConnection(targetConnKey);
            // A LIMIT-style probe query is enough to expose the column metadata.
            String sql = targetDatabaseOperate.getLimitSql(getTargetFullTableName());
            targetStatement = targetConn.prepareStatement(sql);
            targetResultSet = targetStatement.executeQuery();
            ResultSetMetaData targetMetaData = targetResultSet.getMetaData();

            LOG.info("开始获取目标表的对应的字段属性：" + LogOutManager.CR + sql);
            List<ValueMetaData> valueMetaDataList = DatabaseUtil.getValueFromSQLTypeList(BaseDatabaseMeta.DATABASE_ORACLE_PLUGINID, targetMetaData);
            int size = valueMetaDataList.size();
            columnMetaData.setTargetColumnNameList(valueMetaDataList);
            if (size > 0) {
                StringBuilder sb = new StringBuilder();
                sb.append("共查询到目标表有").append(size).append("个字段属性数据：").append(LogOutManager.CR);
                for (int i = 0; i < size; i++) {
                    ValueMetaData valueMetaData = valueMetaDataList.get(i);
                    ValueMetaInterface vm = valueMetaData.getValueMetaInterface();
                    targetValueMetaDataMap.put(vm.getName().toUpperCase(), valueMetaData);
                    sb.append("===目标表 --- 字段名:").append(vm.getName())
                            .append(", 数据库字段类型:").append(vm.getSqlTypeDesc())
                            .append(" , 对应的系统字段类型:").append(vm.getTypeDesc())
                            .append(LogOutManager.CR);
                }
                LOG.info(sb.toString());
            } else {
                throw new Exception("无法获取到目标表的字段信息！" + LogOutManager.CR + sql);
            }
        } catch (Exception e) {
            LOG.error("初始化目标库环境获取目标表字段信息数据失败！", e);
            throw e;
        } finally {
            DataTaskConnManager.close(targetResultSet, targetStatement, targetConn);
        }

        readIds();// parse the configured key column list into targetIds
        if (targetIds == null || targetIds.length == 0) {
            // No keys configured: optionally auto-detect the primary keys.
            if (taskMeta.isTargeAutoCheckKey()) {
                targetIds = targetDatabaseOperate.getPrimaryKeys(this);
            } else {
                LOG.info("没有设置主键列表但设置了不自动检测，跳过获取目标表主键信息。");
            }
        }

        if (targetIds != null && targetIds.length > 0) {
            // Reorder the target columns: key columns first, then the rest.
            List<ValueMetaData> targetColumnNameList = columnMetaData.getTargetColumnNameList();
            List<ValueMetaData> newValueMetaDataList = new ArrayList<>(targetColumnNameList.size());
            HashSet<String> idSet = new HashSet<>();// upper-cased key names, for case-insensitive matching
            for (int i = 0; i < targetIds.length; i++) {
                String idKey = targetIds[i].toUpperCase();
                ValueMetaData idMeta = targetValueMetaDataMap.get(idKey);
                if (idMeta == null) {// a configured (composite) key column is missing on the target table
                    String msg = "目标表不存在的需要执行的主键字段，请修改联合主键字段列表！：" + targetIds[i];
                    LOG.error(msg);
                    throw new Exception(msg);
                }
                newValueMetaDataList.add(idMeta);
                idSet.add(idKey);
            }
            for (int i = 0; i < targetColumnNameList.size(); i++) {
                ValueMetaData oldMeta = targetColumnNameList.get(i);
                // Compare upper-cased so keys configured in a different case are not duplicated.
                if (!idSet.contains(oldMeta.getName().toUpperCase())) {
                    newValueMetaDataList.add(oldMeta);
                }
            }
            // Install the reordered column list.
            columnMetaData.setTargetColumnNameList(newValueMetaDataList);
        }
    }

    /**
     * Counts the rows currently in the target table.
     *
     * <p>Idiom fix: the single-row COUNT result is read with a plain
     * {@code if (next())} instead of the original {@code while}/{@code break}.
     *
     * @return the row count, or 0 when the count query fails (the failure is
     *         logged, never thrown)
     */
    private long getTargetRowNum() {
        long num = 0;
        String targetTable = getTargetFullTableName();
        Connection targetConn = null;
        PreparedStatement targetStatement = null;
        ResultSet targetResultSet = null;
        try {
            LOG.info("统计目标表[" + targetTable + "]的数据行数");
            targetConn = DataTaskConnManager.getConnection(targetConnKey);
            // NOTE(review): the table name is concatenated into the SQL; it comes from
            // task configuration, not user input — confirm it stays trusted.
            targetStatement = targetConn.prepareStatement("SELECT COUNT(1) AS NUM FROM " + targetTable);
            targetResultSet = targetStatement.executeQuery();
            // COUNT(1) yields exactly one row.
            if (targetResultSet.next()) {
                num = targetResultSet.getLong("NUM");
            }
        } catch (Exception e) {
            LOG.error("统计目标表[" + targetTable + "]的数据行数失败！", e);
        } finally {
            DataTaskConnManager.close(targetResultSet, targetStatement, targetConn);
        }
        LOG.info("目标表[" + targetTable + "]数据行数：" + num + "条");
        return num;
    }

    /**
     * Builds the set of columns shared by the source result set and the
     * target table, matched by name.
     *
     * <p>Fix: the target column map is keyed by UPPER-cased names (see
     * initTargetData()), so the source name is upper-cased before the lookup.
     * Previously the raw source name was used, silently dropping every column
     * whose case differed from the upper-cased target key.
     *
     * @throws RuntimeException when no column name is shared at all
     **/
    public void buildCommonColumns() {
        Set<ValueMetaData> columnNames = columnMetaData.getColumnNames();
        columnNames.clear();
        LOG.info("开始对SQL执行结果和目标表字段做映射");
        List<ValueMetaData> sourceColumnNameList = columnMetaData.getSourceColumnNameList();
        ConcurrentHashMap<String, ValueMetaData> targetColumnNameMap = columnMetaData.getTargetColumnNameMap();
        for (int i = 0; i < sourceColumnNameList.size(); i++) {
            ValueMetaData sourceColumnName = sourceColumnNameList.get(i);
            // Keys in targetColumnNameMap are upper-cased; match case-insensitively.
            if (targetColumnNameMap.containsKey(sourceColumnName.getName().toUpperCase())) {
                columnNames.add(sourceColumnName);
            }
        }
        if (columnNames.isEmpty()) {
            throw new RuntimeException("源表和目标表的字段结构不一致！");
        }
    }

    /**
     * Writes the queried data into the target table. When total statistics
     * are enabled, the target row count is taken before and after the write
     * so the update count can be derived later. Writing is segmented
     * (concurrent) when more than one segment is configured, serial otherwise.
     *
     * @param writeDateConResult open source connection/statement/result set
     * @throws Exception when no write SQL is available or the write fails
     */
    public void write(WriteDateConResult writeDateConResult) throws Exception {
        if (taskMeta.isOpenTotalStatistics()) {
            beforeInputNum = getTargetRowNum();
            LOG.info("写入目标表[" + getTargetFullTableName() + "]之前的数据行数：" + beforeInputNum);
        } else {
            LOG.info("已关闭入库数统计！不对目标表进行入库之前统计。");
        }

        // The write SQL comes from the resolved database operate SPI.
        String writeSql = targetDatabaseOperate.getWriteSql(this);
        boolean missingSql = writeSql == null || writeSql.length() == 0;
        if (missingSql) {
            throw new Exception("无效的目标库写入SQL语句：无法找到目标数据库操作实例！请检查lib文件夹下的[BinDatax-spi-database-]前缀的jar文件");
        }
        // More than one segment means concurrent segmented writing; otherwise serial.
        if (taskMeta.getWriteSegmentNumber() > 1) {
            writeToDatabaseBySegment(writeDateConResult, writeSql);
        } else {
            writeToDatabaseBySerial(writeDateConResult, writeSql);
        }
        if (taskMeta.isOpenTotalStatistics()) {
            afterInputNum = getTargetRowNum();
            LOG.info("写入目标表[" + getTargetFullTableName() + "]之后的数据行数为：" + afterInputNum);
        } else {
            LOG.info("已关闭入库数统计！不对目标表进行入库之后的统计。");
        }
    }

    /**
     * Serially writes every row of the source result set into the target
     * table. Rows are collected into pre-allocated, reused buffers and
     * flushed through executeBatch(...) once writeSegmentBatchSize rows are
     * buffered; a final partial batch is flushed after the loop. All JDBC
     * resources (source and target side) are closed in the finally block.
     *
     * @param writeDateConResult open source connection/statement/result set from initSourceData()
     * @param sql                target-table write SQL produced by the database operate SPI
     * @throws Exception when reading the source or writing the target fails
     */
    public void writeToDatabaseBySerial(WriteDateConResult writeDateConResult, String sql) throws Exception {
        // Pre-build the reusable batch buffers: one Object[] per row slot.
        List<ValueMetaData> sourceColumnNameList = columnMetaData.getSourceColumnNameList();
        int sourceColumnNameSize = sourceColumnNameList.size();

        int batchSize = taskMeta.getWriteSegmentBatchSize();
        List<Object[]> batchBuffRowsList = new ArrayList<Object[]>(batchSize);
        for (int i = 0; i < batchSize; i++) {
            Object[] sourceColRows = new Object[sourceColumnNameSize];
            batchBuffRowsList.add(sourceColRows);
        }
        ResultSet sourceResultSet = writeDateConResult.getSourceResultSet();
        Connection targetConn = null;
        PreparedStatement targetStatement = null;
        ExecuteStatistics tempStatistics = new ExecuteStatistics();
        int insertTotalLineSize = 0;// total rows read from the source so far
        try {
            targetConn = DataTaskConnManager.getConnection(targetConnKey);
            // NOTE(review): autocommit is disabled here; presumably executeBatch(...) commits per batch — confirm.
            targetConn.setAutoCommit(false);
            targetStatement = targetConn.prepareStatement(sql);

            int rowLimit = 0;// cursor inside the current batch
            while (sourceResultSet.next()) {
                if(notifyTransStop){// external stop request: stop reading further rows
                    break;
                }
                // Cache the row into the batch list:
                // take the pre-allocated buffer for the current slot.
                Object[] sourceColRows = batchBuffRowsList.get(rowLimit);
                // Read every column value of the current source row.
                for (int i = 0; i < sourceColumnNameSize; i++) {
                    ValueMetaData sourceMetaData = sourceColumnNameList.get(i);
                    ValueMetaInterface commValueMeta = sourceMetaData.getValueMetaInterface();
                    String resColName = commValueMeta.getName();
                    // Let the database operate convert the value when it claims it.
                    Object value;
                    if(targetDatabaseOperate.checkTransValue(commValueMeta)){
                        value = targetDatabaseOperate.transValue(sourceResultSet,commValueMeta);
                    } else {
                        value = sourceResultSet.getObject(resColName);
                    }
                    EventValueData eventValueData = EventRowUtil.getEventRowData();
                    // Publish the value through the thread-local event holder ...
                    eventValueData.addValue(rowLimit, sourceMetaData, value);
                    // ... notify listeners (they may replace the value) ...
                    exeEvent(EventType.TASK_RUNNING_QUERY_VALUE_DATA);
                    // ... then read the (possibly modified) value back.
                    sourceColRows[i] = eventValueData.getValue();
                    eventValueData.clear();// reset the holder for the next column
                }

                // Advance the row counters.
                insertTotalLineSize++;
                rowLimit++;
                // Batch is full: flush it to the database.
                if (rowLimit >= batchSize) {
                    // The batch id doubles as the event-row marker.
                    long batchId = System.currentTimeMillis();
                    executeBatch(targetConn, targetStatement, batchBuffRowsList, taskMeta.getFailTryNum()
                            , taskMeta.getFailTryInterval() * 1000, tempStatistics, batchId , null );
                    addStatistics( null, tempStatistics);
                    String info;
                    if(taskMeta.isQueryTotal()){
                        info = String.format("入库进度详情：查询总数为%s条，已成功入库%s条，失败入库%s条，入库预计进度为：%s ",
                                queryTotal, (insertTotalLineSize - taskStatistics.getError()), taskStatistics.getError(), StringUtils.percent(insertTotalLineSize, queryTotal));
                    }else {
                        info = String.format("入库进度详情：已成功入库%s条，失败入库%s条", (insertTotalLineSize - taskStatistics.getError()), taskStatistics.getError()  );
                    }
                    LOG.debug(info);
                    // Reset the batch cursor and the per-batch statistics.
                    rowLimit = 0;
                    tempStatistics.clear();
                }
            }
            // Flush the trailing partial batch, if any.
            if (rowLimit > 0) {
                long batchId = System.currentTimeMillis();
                List<Object[]> remainingList = batchBuffRowsList.subList(0, rowLimit);
                executeBatch(targetConn, targetStatement, remainingList, taskMeta.getFailTryNum()
                        , taskMeta.getFailTryInterval() * 1000, tempStatistics, batchId , null);
                addStatistics( null,tempStatistics );

                String info;
                if(taskMeta.isQueryTotal()){
                    info = String.format("入库进度详情：查询总数为%s条，已成功入库%s条，失败入库%s条，入库预计进度为：%s ",
                            queryTotal, (insertTotalLineSize - taskStatistics.getError()), taskStatistics.getError(), StringUtils.percent(insertTotalLineSize, queryTotal));
                }else {
                    info = String.format("入库进度详情：已成功入库%s条，失败入库%s条", (insertTotalLineSize - taskStatistics.getError()), taskStatistics.getError()  );
                }
                LOG.debug(info);
                tempStatistics.clear();
                // All batches executed without error at this point.
            }
            targetStatement.clearBatch();
        } catch (Exception e) {// write failed: row counters were already handled; only cleanup remains
            LOG.error("入库执行失败。", e);
            throw e;
        } finally {
            // Fold any half-finished batch statistics before cleanup (no-op after a clean run).
            addStatistics( null, tempStatistics );
            tempStatistics.clear();

            // Detach the source resources from the holder before closing them.
            Connection sourceConn = writeDateConResult.getSourceConn();
            PreparedStatement sourceStatement = writeDateConResult.getSourceStatement();
            writeDateConResult.setSourceConn(null);
            writeDateConResult.setSourceStatement(null);
            writeDateConResult.setSourceResultSet(null);
            DataTaskConnManager.close(sourceResultSet, sourceStatement, sourceConn);
            DataTaskConnManager.close(null, targetStatement, targetConn);
        }
    }

    /**
     * Concurrently writes the source rows through writeSegmentNumber segment
     * worker threads. This (reader) thread pushes rows into a bounded queue;
     * the segment workers drain it and batch-insert. A watcher thread runs
     * the worker group; the finally block waits for it (bounded by
     * writeAwaitOutTime), escalating from a cooperative stop notification to
     * thread interruption when the workers do not finish in time.
     *
     * @param writeDateConResult open source connection/statement/result set from initSourceData()
     * @param sql                target-table write SQL produced by the database operate SPI
     * @throws Exception when reading fails, the queue stays full past the
     *                   timeout, or all segment workers stopped early
     */
    public void writeToDatabaseBySegment(WriteDateConResult writeDateConResult, String sql) throws Exception {
        int writeSegmentNumber = taskMeta.getWriteSegmentNumber();
        final ArrayBlockingQueue<Object[]> allSegmentDataQueue = new ArrayBlockingQueue<>(dataQueueSize);

        String groupName = Thread.currentThread().getName();
        ThreadFactory segmentThreadFactory = new ThreadFactory() {
            private int threadNumber = 1;

            @Override
            public Thread newThread(Runnable r) {
                // NOTE(review): numbering starts at (1 + 1) = 2, so the first worker is
                // named "...-分片任务-2" — confirm whether names should start at 1.
                String name = groupName + "-分片任务-" + (threadNumber + 1);
                Thread thread = new Thread(r, name);
                threadNumber++;
                return thread;
            }
        };
        // Fixed-size pool: one thread per segment; CallerRunsPolicy as back-pressure.
        ThreadPoolExecutor segmentThreadPoolExecutor = new ThreadPoolExecutor(
                writeSegmentNumber,
                writeSegmentNumber,
                10,
                TimeUnit.SECONDS,
                new ArrayBlockingQueue<>(writeSegmentNumber),
                segmentThreadFactory,
                new ThreadPoolExecutor.CallerRunsPolicy()
        );

        // One SegmentData/SegmentWork pair per segment, all sharing the same queue.
        List<WorkerWrapper> workerWrapperList = new ArrayList<WorkerWrapper>(writeSegmentNumber);
        for (int i = 0; i < writeSegmentNumber; i++) {
            SegmentData segmentData = createSegmentData(sql);
            segmentData.allSegmentDataQueue = allSegmentDataQueue;
            SegmentWork segmentWork = new SegmentWork(segmentData);
            segmentWorkList.add(segmentWork);

            WorkerWrapper<SegmentData, SegmentData> workerWrapper = new WorkerWrapper.Builder<SegmentData, SegmentData>()
                    .worker(segmentWork)
                    .callback(segmentWork)
                    .param(segmentData)
                    .build();
            workerWrapperList.add(workerWrapper);
        }
        // The huge timeout effectively disables Async's own limit; the real timeout
        // control over the workers happens in this method's finally block.
        // Watcher thread that runs the worker group.
        Thread startAsync = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    Async.beginWork(100000 * 1000L, segmentThreadPoolExecutor, workerWrapperList);
                    LOG.info("数据片线程组执行完成。");
                } catch (Exception e) {
                    LOG.error("数据片线程组执行失败！", e);
                } finally {
                    // Release the Async machinery once the group is done.
                    Async.shutDown();
                }
            }
        });
        startAsync.setName(this.toString());
        startAsync.start();
        // Column metadata used for reading the source rows.
        List<ValueMetaData> sourceColumnNameList = columnMetaData.getSourceColumnNameList();
        int sourceColumnNameSize = sourceColumnNameList.size();
        ResultSet sourceResultSet = writeDateConResult.getSourceResultSet();
        // Queue-offer / join timeout in seconds; <= 0 means wait without limit.
        int awaitOutTime = taskMeta.getWriteAwaitOutTime();
        try {
            int rowLimit = 0;// row cursor
            while (sourceResultSet.next()) {
                if(notifyTransStop){// external stop request: stop fetching further rows
                    break;
                }
                // Each row gets its own buffer because it is handed to another thread.
                Object[] sourceColRows = new Object[sourceColumnNameSize];
                // Read every column value of the current source row.
                for (int i = 0; i < sourceColumnNameSize; i++) {
                    ValueMetaData sourceMetaData = sourceColumnNameList.get(i);
                    ValueMetaInterface commValueMeta = sourceMetaData.getValueMetaInterface();
                    String resColName = commValueMeta.getName();
                    // Let the database operate convert the value when it claims it.
                    Object value;
                    if(targetDatabaseOperate.checkTransValue(commValueMeta)){
                        value = targetDatabaseOperate.transValue(sourceResultSet,commValueMeta);
                    } else {
                        value = sourceResultSet.getObject(resColName);
                    }
                    EventValueData eventValueData = EventRowUtil.getEventRowData();
                    // Publish the value through the thread-local event holder ...
                    eventValueData.addValue(rowLimit, sourceMetaData, value);
                    // ... notify listeners (they may replace the value) ...
                    exeEvent(EventType.TASK_RUNNING_QUERY_VALUE_DATA);
                    // ... then read the (possibly modified) value back.
                    sourceColRows[i] = eventValueData.getValue();
                    eventValueData.clear();// reset the holder for the next column
                }
                rowLimit++;
                if (checkCanWriteData(segmentWorkList)) {// at least one segment can still accept data
                    int _outTime = 0;
                    while (true) {
                        if (notifyTransStop) {
                            break;
                        }
                        // Offer the row to the shared queue feeding the segment threads.
                        boolean state = allSegmentDataQueue.offer(sourceColRows, 1000, TimeUnit.MILLISECONDS);
                        if (state) {
                            break;
                        }
                        if(awaitOutTime <= 0){// no timeout configured: keep retrying
                            continue;
                        }
                        if (_outTime >= awaitOutTime) {// timed out: abort; catch block stops the workers
                            throw new Exception("数据入库超时！");
                        }
                        // Queue still full: count one more waited second and retry.
                        _outTime++;
                        LOG.info("程序入库数据队列已满，已等待" + _outTime + "秒....");
                    }
                } else {
                    throw new Exception("未入库完成，分片线程已全部停止，数据入库失败！");
                }
            }
            doneAllSegment();
        } catch (Exception e) {// write failed: row counters were already handled elsewhere
            stopAllSegmentErrorStop();
            LOG.error("入库执行失败。", e);
            throw e;
        } finally {
            try {
                if (awaitOutTime <= 0) {
                    LOG.warn("警告，已设置当前任务等待数据片线程执行结束时间：无期限");
                    startAsync.join();
                }else {
                    long waitStartTime = System.currentTimeMillis();
                    startAsync.join(awaitOutTime * 1000L);
                    long waitEndTime = System.currentTimeMillis();
                    // NOTE(review): completion is inferred from elapsed time; join(timeout)
                    // ending exactly at the boundary can be misreported — startAsync.isAlive()
                    // would be the reliable check. Confirm before changing.
                    if(waitEndTime - waitStartTime <= awaitOutTime * 1000L){
                        LOG.info("当前任务数据片线程组已执行结束。");
                    }else {
                        long startTime = System.currentTimeMillis();
                        LOG.info("当前任务数据片线程组未执行完成，已超时，通知数据片线程组停止当前任务。");
                        // Ask the workers to stop cooperatively first.
                        stopAllSegmentErrorStop();
                        boolean isAllSegmentWorkStop = false;
                        for (int i = 0; i < 3; i++) {
                            isAllSegmentWorkStop = isAllSegmentWorkStop(segmentWorkList);
                            if(isAllSegmentWorkStop){
                                break;
                            }else {
                                Thread.sleep(1000);
                                LOG.info("当等数据片线程组响应停止执行：" + (i+1)+"秒....");
                            }
                        }
                        if(isAllSegmentWorkStop){
                            LOG.info("数据片线程组已停止，等待耗时：" + (System.currentTimeMillis() - startTime) + "毫秒");
                        }else {
                            // Workers ignored the stop flag: interrupt them as a last resort.
                            LOG.warn("当等数据片线程组未响应停止，尝试中断停止。");
                            for (int i = 0; i < segmentWorkList.size(); i++) {
                                SegmentWork segmentWork = segmentWorkList.get(i);
                                segmentWork.interruptSegment();
                            }
                        }
                    }
                }
            } catch (Exception ignored) {}
            // Only the source-side JDBC resources remain to be released here.
            Connection sourceConn = writeDateConResult.getSourceConn();
            PreparedStatement sourceStatement = writeDateConResult.getSourceStatement();
            writeDateConResult.setSourceConn(null);
            writeDateConResult.setSourceStatement(null);
            writeDateConResult.setSourceResultSet(null);
            DataTaskConnManager.close(sourceResultSet, sourceStatement, sourceConn);
        }
    }

    /**
     * Returns true when at least one segment worker is still in a state
     * that accepts new data (so the producer may keep feeding the queue).
     *
     * @param segmentWorkList segment workers of this task
     * @return true if any segment can still receive data
     */
    private boolean checkCanWriteData(List<SegmentWork> segmentWorkList) {
        for (SegmentWork work : segmentWorkList) {
            if (work.getSegmentData().status.canWriteData()) {
                return true;
            }
        }
        return false;
    }
    /**
     * Returns true only when every segment worker has reached FINISH.
     *
     * @param segmentWorkList segment workers of this task
     * @return true if all segments are finished
     */
    private boolean isAllSegmentWorkStop(List<SegmentWork> segmentWorkList) {
        for (SegmentWork work : segmentWorkList) {
            if (SegmentStatus.FINISH != work.getSegmentData().status) {
                return false;
            }
        }
        return true;
    }

    /**
     * Builds a new segment-data object owned by this task, bound to the
     * target connection key and the given write SQL.
     *
     * @param sql the write SQL this segment will execute
     * @return the initialized segment data
     */
    private SegmentData createSegmentData(String sql) {
        SegmentData data = new SegmentData();
        data.sql = sql;
        data.connKey = targetConnKey;
        data.parent = this;
        return data;
    }

    /** Asks every segment worker to stop in error mode (no-op when there are none). */
    public void stopAllSegmentErrorStop() {
        if (!this.segmentWorkList.isEmpty()) {
            for (SegmentWork work : this.segmentWorkList) {
                work.notifyErrorStop();
            }
        }
    }

    /** Notifies every segment worker that the task's input is done (no-op when there are none). */
    public void doneAllSegment() {
        if (!this.segmentWorkList.isEmpty()) {
            for (SegmentWork work : this.segmentWorkList) {
                work.notifyDone();
            }
        }
    }

    /**
     * Executes one batch write to the target database.
     * When the task is configured to split duplicate primary keys, the batch is
     * first de-duplicated in memory on the composite key: conflict-free rows are
     * written first, then the conflicting rows are written in a second pass and
     * their statistics merged into the main accumulator.
     *
     * @param targetConn        target database connection
     * @param targetStatement   prepared statement bound to the write SQL
     * @param batchBuffRowsList rows to write for this batch
     * @param tryNum            retry count for a failed batch
     * @param stopTimeInSeconds pause between retries (see addBatchData)
     * @param batchStatistics   accumulator for this batch's statistics
     * @param batchId           batch id, used only in log messages
     * @param thisSegmentWork   segment context for listener events
     * @throws Exception when the write fails and errors are not ignored
     */
    public void executeBatch(Connection targetConn, PreparedStatement targetStatement
            , List<Object[]> batchBuffRowsList, int tryNum, int stopTimeInSeconds, ExecuteStatistics batchStatistics
            , long batchId , SegmentWork thisSegmentWork) throws Exception {
        exeEvent(thisSegmentWork , EventType.TASK_RUNNING_WRITE_BATCH_BEFORE);
        String executeBatchMsg = "批次"+batchId + " - ";
        if (batchBuffRowsList == null || batchBuffRowsList.isEmpty()) {
            LOG.warn(executeBatchMsg + "批量数据列表为空，不执行入库！");
            return;
        }

        int dataRowSize = batchBuffRowsList.size();
        LOG.info(executeBatchMsg + "批量数据列表数据量 " + dataRowSize + "条，准备执行入库。");
        DatabaseUtil.setConn(targetConn);

        List<Object[]> repetitiveBuffRowsList = new ArrayList<>();// rows whose composite key repeats
        boolean hasTargetId = targetIds != null && targetIds.length > 0;
        // Detect duplicate primary keys inside this batch.
        if (taskMeta.isWriteSplitDuplicatePrimary() && hasTargetId) {
            ConcurrentHashMap<String, ValueMetaData> sourceColumnNameMap = columnMetaData.getSourceColumnNameMap();
            List<Object[]> subBatchBuffRowsList = new ArrayList<>(dataRowSize / 5);// conflict-free rows
            HashSet<String> seenKeys = new HashSet<>(dataRowSize / 5);

            StringBuilder keyBuilder = new StringBuilder();
            LOG.info("检测数据是否存在主键冲突。");
            for (int kk = 0; kk < dataRowSize; kk++) {
                keyBuilder.setLength(0);
                Object[] row = batchBuffRowsList.get(kk);
                for (int i = 0; i < targetIds.length; i++) {
                    ValueMetaData sourceMetaData = sourceColumnNameMap.get(targetIds[i]);
                    // A composite-key field with no matching source column counts as "null".
                    if (sourceMetaData == null) {
                        keyBuilder.append("null");
                    } else {
                        Object value = row[sourceMetaData.getCloIndex() - 1];
                        keyBuilder.append(sourceMetaData.getValueMetaInterface().getString(value));
                    }
                    // BUGFIX: separate the key parts; plain concatenation made
                    // ("ab","c") and ("a","bc") collide and be flagged as duplicates.
                    keyBuilder.append('\u0001');
                }
                String key = keyBuilder.toString();
                if (seenKeys.add(key)) {
                    subBatchBuffRowsList.add(row);
                } else {
                    repetitiveBuffRowsList.add(row);
                }
            }
            if (repetitiveBuffRowsList.isEmpty()) {
                LOG.info("本批次数据不存在主键冲突。");
            } else {
                LOG.warn("本批次数据存在主键冲突条数：" + repetitiveBuffRowsList.size() + "条");
                batchBuffRowsList = subBatchBuffRowsList;
            }
        }
        if (!repetitiveBuffRowsList.isEmpty()) {
            LOG.info("优先入库无主键冲突批次数据：" + batchBuffRowsList.size() + "条");
        }
        // Write the full (or de-duplicated) batch first.
        addBatchData(targetConn, targetStatement, batchBuffRowsList, tryNum, stopTimeInSeconds, batchStatistics
                , thisSegmentWork , executeBatchMsg);

        if (!repetitiveBuffRowsList.isEmpty()) {
            LOG.info("再入库主键冲突的批次数据：" + repetitiveBuffRowsList.size() + "条");
            ExecuteStatistics repetitiveBatchStatistics = new ExecuteStatistics();
            try {
                addBatchData(targetConn, targetStatement
                        , repetitiveBuffRowsList, tryNum, stopTimeInSeconds, repetitiveBatchStatistics
                        , thisSegmentWork , executeBatchMsg);
            } finally {
                // Merge the second pass's numbers even if it throws.
                batchStatistics.addValue(repetitiveBatchStatistics);
            }
        }
    }

    /**
     * Writes one batch of rows with retries; falls back to row-by-row inserts
     * when the batch keeps failing and the task is configured to ignore errors.
     * Statistics are recorded on the supplied accumulator.
     *
     * @param targetConn        target connection (manual commit/rollback)
     * @param targetStatement   prepared statement bound to the write SQL
     * @param batchBuffRowsList rows to write
     * @param tryNum            number of batch attempts before giving up
     * @param stopTimeInSeconds wait between retries — NOTE(review): the name says
     *                          seconds but the value is passed to Thread.sleep as
     *                          milliseconds while the log divides it by 1000;
     *                          confirm the unit against callers
     * @param batchStatistics   accumulator for input/output/error counts
     * @param thisSegmentWork   segment context for listener events
     * @param executeBatchMsg   log prefix identifying the batch
     * @throws Exception when the batch fails and errors are not ignored
     */
    private void addBatchData(Connection targetConn, PreparedStatement targetStatement
            , List<Object[]> batchBuffRowsList, int tryNum, int stopTimeInSeconds, ExecuteStatistics batchStatistics
            , SegmentWork thisSegmentWork , String executeBatchMsg ) throws Exception {
        int dataRowSize = batchBuffRowsList.size();
        Exception insertError = null;
        for (int i = 0; i < tryNum; i++) {
            try {
                LOG.debug(executeBatchMsg + "第" + (i + 1) + "次批量执行入库.");
                targetStatement.clearBatch();
                LOG.debug(executeBatchMsg + "addBatchValue，请等待....");
                addBatchValue(targetStatement, batchBuffRowsList);
                LOG.debug(executeBatchMsg + "addBatchValue完成，准备executeBatch，请等待....");
                targetStatement.executeBatch();
                LOG.debug(executeBatchMsg + "executeBatch完成，准备commit本次事务，请等待....");
                targetConn.commit();
                LOG.debug(executeBatchMsg + "事务commit成功，提交完成！");
                // BUGFIX: a retry can succeed after an earlier failure — clear the
                // saved error so the success is not reported as a failure below.
                insertError = null;
                break;
            } catch (Exception e) {
                // Roll back this batch before retrying.
                targetConn.rollback();
                insertError = e;
                String msg = executeBatchMsg + "批次入库发生错误！" + (stopTimeInSeconds / 1000) + "秒后再次执行！";
                LOG.error(msg, e);
                if (i < tryNum - 1) {
                    // Only wait when another attempt will actually follow.
                    Thread.sleep(stopTimeInSeconds);
                }
            }
        }
        if (insertError == null) {
            exeEvent(thisSegmentWork , EventType.TASK_RUNNING_WRITE_BATCH_AFTER_SUCCESS);
            LOG.debug(executeBatchMsg + "批次入库完成，该批次无错误。");
            batchStatistics.setInput(dataRowSize);
            batchStatistics.setOutput(dataRowSize);
            batchStatistics.setUpdate(dataRowSize);
            return;
        }
        exeEvent(thisSegmentWork , EventType.TASK_RUNNING_WRITE_BATCH_AFTER_FAIL);
        // When errors are not ignored, record the failure and rethrow.
        if (!taskMeta.isErrorIgnore()) {
            LOG.debug(executeBatchMsg + "不忽略错误方式入库：批次入库失败！（已重试" + tryNum + "次）。");
            LOG.debug(executeBatchMsg + "以主键方式(主键列名：" + taskMeta.getTargetKeys() + ")批量写入/更新到数据库时发生错误！", insertError);
            batchStatistics.setError(dataRowSize);
            batchStatistics.setReject(dataRowSize);
            throw insertError;
        }
        LOG.debug(executeBatchMsg + "忽略错误方式入库：入库批次数据失败，现在尝试按行入库。");
        // Fall back to row-by-row processing so good rows still get written.
        int batchSize = batchBuffRowsList.size();
        int ignoreLogOutMaxSize = taskMeta.getLogOutErrorMaxSize();// max error rows to log (0 = none, <0 = unlimited)
        int _errorTotal = 0;// total failed rows
        int errorOutSize = 0;// error rows actually logged
        StringBuilder errorMsg = new StringBuilder();
        exeEvent(thisSegmentWork , EventType.TASK_RUNNING_WRITE_BETWEEN_BATCH_FAIL_AND_ROW_BEFORE);

        for (int rowNo = 0; rowNo < batchSize; rowNo++) {
            Object[] sourceColRows = batchBuffRowsList.get(rowNo);
            try {
                targetStatement.clearBatch();
                addSingleValue(rowNo , targetStatement, sourceColRows);
                targetStatement.addBatch();
                // BUGFIX: the single-row batch must actually be executed before the
                // commit — the original committed without executing, so the row was
                // never written to the database.
                targetStatement.executeBatch();
                targetConn.commit();
                exeEvent(thisSegmentWork , EventType.TASK_RUNNING_WRITE_ROW_AFTER_SUCCESS);
            } catch (Exception e) {
                exeEvent(thisSegmentWork , EventType.TASK_RUNNING_WRITE_ROW_AFTER_FAIL);
                _errorTotal++;
                // Roll back just this row.
                targetConn.rollback();
                if (ignoreLogOutMaxSize != 0) {
                    // BUGFIX: cap by the number of errors already logged, not by the
                    // row position — otherwise errors on late rows were never logged.
                    if (ignoreLogOutMaxSize < 0 || errorOutSize < ignoreLogOutMaxSize) {
                        errorOutSize++;
                        errorMsg.append("-----------行错误信息块开始>>>>>>>>>>").append(LogOutManager.CR)
                                .append("批次数据的第").append(rowNo + 1).append("行入库失败！（已忽略错误！）：")
                                .append(LogOutManager.CR).append(StringUtils.getExceptionInfo(e))
                                .append(LogOutManager.CR).append("<<<<<<<<<<行错误信息块结束-----------").append(LogOutManager.CR);
                    }
                }
            }
        }
        if (_errorTotal == 0) {
            batchStatistics.setInput(dataRowSize);
            batchStatistics.setOutput(dataRowSize);
            batchStatistics.setUpdate(dataRowSize);
            LOG.info(executeBatchMsg + "转为按行处理后入库无报错！");
        } else {
            if (ignoreLogOutMaxSize == 0 || errorOutSize == 0) {
                LOG.warn(executeBatchMsg + "按行执行出错，但是已设置不输出错误日志！该批次总数" + batchSize + "，错误入库数：" + _errorTotal + "条");
            } else {
                String msg = "按行执行出错，该批次总数" + batchSize + "，错误入库数：" + _errorTotal + "条";
                if (errorOutSize < _errorTotal) {
                    errorMsg.append(".....还有").append(_errorTotal - errorOutSize).append("条数据入库错误信息已被忽略。").append(LogOutManager.CR);
                }
                msg += errorMsg;
                LOG.error(executeBatchMsg + msg);
            }
            batchStatistics.setInput(dataRowSize - _errorTotal);
            batchStatistics.setOutput(dataRowSize - _errorTotal);
            batchStatistics.setUpdate(dataRowSize - _errorTotal);
            batchStatistics.setError(_errorTotal);
            batchStatistics.setReject(_errorTotal);
        }
    }


    /**
     * Binds every row's values to the statement and queues each as one
     * batch entry (the caller executes and commits the batch).
     *
     * @param targetStatement   prepared statement bound to the write SQL
     * @param batchBuffRowsList source rows to bind
     * @throws Exception when parameter binding fails
     */
    public void addBatchValue(PreparedStatement targetStatement, List<Object[]> batchBuffRowsList) throws Exception {
        int rowCount = batchBuffRowsList.size();
        for (int rowNo = 0; rowNo < rowCount; rowNo++) {
            Object[] row = batchBuffRowsList.get(rowNo);// one source result row
            addSingleValue(rowNo, targetStatement, row);
            targetStatement.addBatch();
        }
    }

    /**
     * Binds one source row's values to the prepared statement's parameters.
     *
     * Parameter layout: when target key columns exist, the key values are bound
     * first at positions 1..k, then every target column is bound at positions
     * 1..n and, additionally, at n+1..2n.
     * NOTE(review): with keys present, the column bindings at 1..n overwrite the
     * key bindings at 1..k — presumably the generated SQL's placeholder layout
     * (built elsewhere) makes this intentional; confirm against the SQL builder.
     *
     * @param rowNo           row index within the batch — currently unused here
     * @param targetStatement prepared statement bound to the write SQL
     * @param sourceColRows   one source result row (values indexed by source column index - 1)
     * @throws Exception when a value cannot be bound
     */
    public void addSingleValue(int rowNo, PreparedStatement targetStatement, Object[] sourceColRows) throws Exception {
        ConcurrentHashMap<String, ValueMetaData> sourceColumnNameMap = columnMetaData.getSourceColumnNameMap();
        ConcurrentHashMap<String, ValueMetaData> targetColumnNameMap = columnMetaData.getTargetColumnNameMap();
        List<ValueMetaData> targetColumnNameList = columnMetaData.getTargetColumnNameList();
        boolean hasTargetId = targetIds != null && targetIds.length > 0;

        // When a key column list exists, bind the key values first.
        if (hasTargetId) {
            // Bind the keys in declared order at positions 1..k.
            for (int i = 0; i < targetIds.length; i++) {
                // Look up the key column in the target table's metadata.
                // NOTE(review): targetMetaData is dereferenced below without a null
                // check — a key name missing from the target map would NPE; verify
                // the key list is validated upstream.
                ValueMetaData targetMetaData = targetColumnNameMap.get(targetIds[i]);
                ValueMetaData sourceMetaData = sourceColumnNameMap.get(targetIds[i]);
                // A composite-key field with no matching source column binds null.
                Object value = null;
                if (sourceMetaData != null) {
                    value = sourceColRows[sourceMetaData.getCloIndex() - 1];
                }
                DatabaseUtil.setValue(targetStatement, targetMetaData.getValueMetaInterface(), value, i + 1);
            }
        }
        int targetColSize = targetColumnNameList.size();
        for (int targetColNo = 0; targetColNo < targetColSize; targetColNo++) {
            ValueMetaData targetMetaData = targetColumnNameList.get(targetColNo);
            Object value = null;
            // Fetch the value from the source row by matching column name;
            // unmatched target columns bind null.
            ValueMetaData sourceMetaData = sourceColumnNameMap.get(targetMetaData.getName());
            if (sourceMetaData != null) {
                value = sourceColRows[sourceMetaData.getCloIndex() - 1];
            }
            int post = targetColNo + 1;
            ValueMetaInterface targetValueMetaInterface = targetMetaData.getValueMetaInterface();
            DatabaseUtil.setValue(targetStatement, targetValueMetaInterface, value, post);
            if (hasTargetId) {// with a key list the same value is also bound at the offset position
                DatabaseUtil.setValue(targetStatement, targetValueMetaInterface, value, post + targetColSize);
            }
        }
    }

    /** @return the fully-qualified target table name in "schema.table" form. */
    public String getTargetFullTableName() {
        String schema = taskMeta.getTargetSchema();
        String table = taskMeta.getTargetTableName();
        return schema + "." + table;
    }
    /**
     * Returns the statistics holder for the calling thread. When the caller
     * is not the main task thread, the task-level current and previous
     * statistics are copied into the caller's thread-local holder first.
     *
     * @return the caller's thread-local statistics data
     */
    public StatisticsData getTaskStatisticsData(){
        StatisticsData local = StatisticsData.getLocal();
        boolean isMainTaskThread = (local == taskStatisticsData);
        if (!isMainTaskThread) {
            local.getPreviousTaskStatistics().copy(taskPreviousTaskStatistics);
            local.getTaskStatistics().copy(taskStatistics);
        }
        return local;
    }

    /**
     * Thread-safely merges a segment's execution statistics into the task
     * totals (keeping the previous snapshot), then fires the
     * statistics-change event outside the lock.
     *
     * @param thisSegmentWork segment that produced the statistics
     * @param addValue        statistics to merge into the task totals
     */
    public void addStatistics(SegmentWork thisSegmentWork, ExecuteStatistics addValue){
        STATISTICS_LOCK.lock();
        try {
            taskPreviousTaskStatistics.copy( taskStatistics );// keep the previous snapshot
            taskStatistics.addValue(addValue);// merge the new values
        } finally {
            // BUGFIX: release in finally — if copy/addValue throws, the lock
            // would otherwise be held forever and block every segment thread.
            STATISTICS_LOCK.unlock();
        }
        // Fire the event outside the lock so listeners cannot stall other
        // threads' statistics updates; listeners read via getTaskStatisticsData().
        exeEvent(thisSegmentWork , EventType.TASK_STATISTICS_CHANGE);
    }

    /**
     * Registers a listener for every event type it declares, keeping each
     * type's listener list sorted by the listener's order value.
     *
     * @param addEvent          the listener to register; ignored when null or
     *                          when it declares no event types
     * @param isCheckEventScope whether to honor the listener's scope mode —
     *                          PROTOTYPE scope registers a fresh instance
     * @throws Exception when a PROTOTYPE instance cannot be created
     */
    public void addTaskEvent(ListenEventInterface addEvent,boolean isCheckEventScope) throws Exception {
        if(addEvent == null || addEvent.getTypes() == null || addEvent.getTypes().isEmpty()){
            return;
        }

        if(isCheckEventScope){// honor the instance scope mode
            EventScope scope = EventScope.get( addEvent.getScope() );
            if( EventScope.PROTOTYPE == scope ){
                // Class.newInstance() is deprecated; invoke the no-arg constructor.
                addEvent = addEvent.getClass().getDeclaredConstructor().newInstance();
            }
        }

        HashSet<EventType> types = addEvent.getTypes();
        for (EventType addType : types){
            List<ListenEventInterface> eventInterfaceList = listenEventMap.computeIfAbsent( addType , v -> new ArrayList<>() );
            eventInterfaceList.add(addEvent);
            // Keep listeners ordered by their declared priority.
            eventInterfaceList.sort(Comparator.comparingInt(ListenEventInterface::getOrder));
        }
    }

    /**
     * Fires a task-level listener event with no segment context.
     *
     * @param eventType the event type to dispatch
     */
    public void exeEvent(EventType eventType){
        notifyEvent(this,null,eventType);
    }
    /**
     * Fires a listener event in the context of a specific segment worker.
     *
     * @param thisSegmentWork the segment worker triggering the event
     * @param eventType       the event type to dispatch
     */
    public void exeEvent(SegmentWork thisSegmentWork,EventType eventType){
        notifyEvent(this,thisSegmentWork,eventType);
    }
    /**
     * Dispatches an event to every non-stopped listener registered for it.
     * Listener failures are logged and never propagate to the caller.
     *
     * @param thisTask        the task the event belongs to; ignored when null
     * @param thisSegmentWork the segment context, may be null
     * @param eventType       the event type; ignored when null
     */
    public static void notifyEvent(DataTask thisTask, SegmentWork thisSegmentWork, EventType eventType){
        if (thisTask == null || eventType == null) {
            return;
        }
        List<ListenEventInterface> listeners = thisTask.listenEventMap.get(eventType);
        if (listeners == null || listeners.isEmpty()) {
            return;
        }
        EventValueData rowData = EventRowUtil.getEventRowData();
        for (int idx = 0; idx < listeners.size(); idx++) {
            ListenEventInterface event = listeners.get(idx);
            if (event.isStop()) {
                continue;
            }
            try {
                event.exe(eventType,thisTask,thisSegmentWork,rowData);
            } catch (Exception e) {
                LOG.error("执行监听事件失败！id=" + event.getId() +"，事件类型：" + eventType ,e);
            }
        }
    }

    /** Flags the task as stopping and asks every segment worker to error-stop. */
    public void stopTask(){
        notifyTransStop = true;
        List<SegmentWork> works = this.segmentWorkList;
        if (works == null || works.isEmpty()) {
            return;
        }
        for (SegmentWork work : works) {
            work.notifyErrorStop();
        }
    }

    /** @return the error recorded for this task, if any. */
    public Exception getTaskError() {
        return taskError;
    }

    /** @return total number of rows found by the source query. */
    public long getQueryTotal() {
        return queryTotal;
    }

    /** @return the target primary-key column names, may be null or empty. */
    public String[] getTargetIds() {
        return targetIds;
    }

    /** @return configured capacity of the data queue. */
    public int getDataQueueSize() {
        return dataQueueSize;
    }

    /** @return the segment workers belonging to this task. */
    public List<SegmentWork> getSegmentWorkList() {
        return segmentWorkList;
    }

    /** @return target table row count measured before the load. */
    public long getBeforeInputNum() {
        return beforeInputNum;
    }

    /** @return target table row count measured after the load. */
    public long getAfterInputNum() {
        return afterInputNum;
    }

    /** @return column metadata for the task's SQL fields. */
    public ColumnMetaData getColumnMetaData() {
        return columnMetaData;
    }

    /** @return connection-pool key of the source database. */
    public String getSourceConnKey() {
        return sourceConnKey;
    }

    /** @return connection-pool key of the target database. */
    public String getTargetConnKey() {
        return targetConnKey;
    }

    /** @return the SQL used at runtime for this task. */
    public String getRunTimeSql() {
        return runTimeSql;
    }

    /** @param runTimeSql the SQL to use at runtime for this task. */
    public void setRunTimeSql(String runTimeSql) {
        this.runTimeSql = runTimeSql;
    }

    /** @return registered listeners grouped by event type. */
    public Map<EventType, List<ListenEventInterface>> getListenEventMap() {
        return listenEventMap;
    }

    /** @return this task's configuration metadata. */
    public TaskMeta getTaskMeta() {
        return taskMeta;
    }

    /** @return the task's display name. */
    public String getTaskName() {
        return taskName;
    }

    /** @return the owning parent object of this task. */
    public Object getParent() {
        return parent;
    }

    /** The task name serves as this task's string representation. */
    @Override
    public String toString() {
        return taskName;
    }
}
