package com.etl.dataflow.scheduler.admin.schedule;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.http.HttpResponse;
import cn.hutool.http.HttpUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.etl.dataflow.common.entity.ExecutorInfo;
import com.etl.dataflow.common.entity.JdbcProperties;
import com.etl.dataflow.common.entity.TaskInfo;
import com.etl.dataflow.common.entity.TaskParam;
import com.etl.dataflow.common.enums.TaskStateEnum;
import com.etl.dataflow.common.exception.EtlException;
import com.etl.dataflow.common.exception.ServiceException;
import com.etl.dataflow.common.response.Code;
import com.etl.dataflow.common.util.ExceptionUtil;
import com.etl.dataflow.common.util.ResponseUtil;
import com.etl.dataflow.scheduler.admin.entity.DataflowDatasource;
import com.etl.dataflow.scheduler.admin.entity.DataflowTaskInfo;
import com.etl.dataflow.scheduler.admin.entity.DataflowTaskRecord;
import com.etl.dataflow.scheduler.admin.mapper.DataflowDatasourceMapper;
import com.etl.dataflow.scheduler.admin.mapper.DataflowTaskRecordMapper;
import com.etl.dataflow.scheduler.communication.ExecutorStateChecker;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * 任务并发管理器
 * 用于控制哪些任务可以同时运行，起到任务数量削峰的作用
 * 防止一瞬间大量任务启动导致执行器压力过大
 *
 * @author dx
 * @since 2022/6/22
 */
@Slf4j
@Component
public class TaskConcurrencyManager {

    private static final ExecutorService EXEC_ALL_TASK_WORKERS = new ThreadPoolExecutor(
            1, 1, 60, TimeUnit.SECONDS,
            new LinkedBlockingDeque<>(),
            new ThreadFactory() {
                final AtomicInteger i = new AtomicInteger();

                @Override
                public Thread newThread(Runnable r) {
                    return new Thread(r, "task-runner-thread-" + i.getAndIncrement());
                }
            }, (r, executor) -> log.warn("超出线程池大小，执行全部任务被拒绝"));
    private static final ExecutorService DAO_EXECUTOR = Executors.newSingleThreadExecutor();
    private final BlockingQueue<DataflowTaskRecord> taskRecordQueue = new LinkedBlockingDeque<>();
    private final Map<Long, TaskInfo> taskInfoMap = new ConcurrentHashMap<>();
    private final DataflowTaskRecordMapper taskRecordMapper;
    private final DataflowDatasourceMapper datasourceMapper;
    private final ExecutorStateChecker executorStateChecker;
    /**
     * 最大并发任务数量
     */
    @Value("${scheduler.max-task-num}")
    private int maxTaskNum;

    public TaskConcurrencyManager(DataflowTaskRecordMapper taskRecordMapper, DataflowDatasourceMapper datasourceMapper, ExecutorStateChecker executorStateChecker) {
        this.taskRecordMapper = taskRecordMapper;
        this.datasourceMapper = datasourceMapper;
        this.executorStateChecker = executorStateChecker;
    }

    @PostConstruct
    private void init() {
        EXEC_ALL_TASK_WORKERS.execute(() -> {
            //noinspection InfiniteLoopStatement
            while (true) {
                try {
                    dequeueTaskAndRun();
                } catch (Throwable e) {
                    e.printStackTrace();
                }
            }
        });
    }
    private void dequeueTaskAndRun() {
        // 限制同时运行的任务最多是maxTaskNum个
        int runningTaskNum = 0;
        Collection<ExecutorInfo> onlineExecutors = executorStateChecker.getOnlineExecutors();
        // 计算运行中任务数量
        for (ExecutorInfo onlineExecutor : onlineExecutors) {
            runningTaskNum += onlineExecutor.getTaskList().size();
        }
        if (runningTaskNum < maxTaskNum) {
            DataflowTaskRecord taskRecord;
            try {
                // 阻塞等待新任务
                taskRecord = taskRecordQueue.take();
            } catch (InterruptedException e) {
                e.printStackTrace();
                log.error(ExceptionUtil.getFullStackTrace(e));
                try {
                    TimeUnit.SECONDS.sleep(1L);
                } catch (InterruptedException ignored) {}
                return;
            }

            Optional<ExecutorInfo> optimalExecutor = executorStateChecker.getOptimalExecutor();
            try {
                while (!optimalExecutor.isPresent()) {
                    log.debug("未找到在线的执行器，等待执行器上线...");
                    // 阻塞等待执行器上线
                    executorStateChecker.awaitUntilExecutorOnline();
                    optimalExecutor = executorStateChecker.getOptimalExecutor();
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            // 将任务交给执行器
            if (optimalExecutor.isPresent()) {
                ExecutorInfo executorInfo = optimalExecutor.get();
                TaskInfo taskInfo = taskInfoMap.get(taskRecord.getTaskInfoId());
                taskInfoMap.remove(taskRecord.getTaskInfoId());
                taskInfo.setExecutorInfo(executorInfo);
                taskRecord.setExecutorUuid(executorInfo.getUuid());
                taskRecord.setLaunchTime(new Date());
                taskRecord.setTaskState(TaskStateEnum.RUNNING.getValue());
                taskInfo.setState(TaskStateEnum.RUNNING);
                // 执行任务
                execTask(taskRecord, taskInfo);
                // 更新任务状态
                DAO_EXECUTOR.submit(() -> updateTaskRecord(taskRecord));
            }
        }
    }

    private void updateTaskRecord(DataflowTaskRecord taskRecord) {
        // 更新任务记录状态，检查结束时间不为空才更新，避免执行器任务结束后通知调度中心更新了任务记录信息，这里反而将任务信息更新为运行中的状态
        UpdateWrapper<DataflowTaskRecord> idEqAndEndTimeIsNull = new UpdateWrapper<DataflowTaskRecord>()
                .eq("id", taskRecord.getId())
                .isNull("end_time");
        int updated = taskRecordMapper.update(taskRecord, idEqAndEndTimeIsNull);
        // 更新失败说明执行器已经在此之前通知调度器更改任务状态为成功或者失败了，如果是成功则errorLog字段为空，这里需要填充为“成功”
        // 如果失败的话errorLog肯定不为空，这里要避免覆盖它，只需要把启动时间补充上即可
        if (updated == 0) {
            DataflowTaskRecord dataflowTaskRecord = taskRecordMapper.selectById(taskRecord.getId());
            DataflowTaskRecord newTaskRecord = new DataflowTaskRecord();
            newTaskRecord.setId(taskRecord.getId());
            boolean needUpdate = false;
            if (dataflowTaskRecord.getLaunchTime() == null) {
                newTaskRecord.setLaunchTime(taskRecord.getLaunchTime());
                needUpdate = true;
            }
            if (dataflowTaskRecord.getErrorLog() == null) {
                newTaskRecord.setErrorLog("成功");
                needUpdate = true;
            }
            if (needUpdate) {
                taskRecordMapper.updateById(newTaskRecord);
            }
        }
    }

    private void execTask(DataflowTaskRecord taskRecord, TaskInfo taskInfo) {
        taskInfo.setTaskRecordId(taskRecord.getId() + "");
        JSONObject execResult = this.executeTask(taskInfo);
        boolean executeSuccess = ResponseUtil.isSuccess(execResult);

        taskRecord.setErrorLog((String) execResult.get("msg"));
        if (executeSuccess) {
            taskRecord.setTaskState(TaskStateEnum.RUNNING.getValue());
            taskInfo.setState(TaskStateEnum.RUNNING);
            executorStateChecker.addTaskToExecutor(taskInfo);
        } else {
            taskRecord.setEndTime(new Date());
            taskRecord.setTaskState(TaskStateEnum.FATAL_ERROR.getValue());
        }
    }

    private JSONObject executeTask(TaskInfo taskInfo) {
        ExecutorInfo executorInfo = taskInfo.getExecutorInfo();
        String execUrl = getExecUrl(executorInfo);
        String body;
        try (HttpResponse httpResponse = HttpUtil
                .createPost(execUrl)
                .body(JSONUtil.toJsonStr(taskInfo), "application/json")
                .execute()) {
            body = httpResponse.body();
        }
        return JSONUtil.parseObj(body);
    }

    private String getExecUrl(ExecutorInfo executorInfo) {
        return executorInfo.getExecutorUrl() + "/task/run";
    }

    /**
     * 任务是否已经在等待队列中
     *
     * @param taskId 任务id
     * @return 是否存在
     */
    public boolean isTaskWaiting(Long taskId) {
        return taskInfoMap.containsKey(taskId);
    }

    public void submitTask(DataflowTaskInfo dataflowTaskInfo) {
        if (this.isTaskWaiting(dataflowTaskInfo.getId())) {
            throw new EtlException("任务重复提交: " + dataflowTaskInfo.getName());
        }

        // 生成任务运行记录插入数据库
        DataflowTaskRecord taskRecord = new DataflowTaskRecord();
        taskRecord.setId(IdWorker.getId());
        taskRecord.setTaskInfoId(dataflowTaskInfo.getId());
        taskRecord.setExecutorUuid(null);
        taskRecord.setTaskState(TaskStateEnum.WAITING.getValue());
        taskRecord.setLaunchTime(null);
        taskRecord.setCreatorId(123L);
        taskRecordMapper.insert(taskRecord);


        // 生成任务基本信息放入map, 供任务消费者使用
        TaskParam taskParam = new TaskParam();
        BeanUtil.copyProperties(dataflowTaskInfo, taskParam);

        // 复制数据源配置到taskParam
        copyJdbcDatasourceForTaskParam(dataflowTaskInfo, taskParam);

        TaskInfo taskInfo = new TaskInfo();
        taskInfo.setId(dataflowTaskInfo.getId() + "");
        taskInfo.setTaskParam(taskParam);
        taskInfo.setExecutorInfo(null);
        taskInfo.setState(TaskStateEnum.WAITING);
        // 设置拦截器BeanName
        setInterceptorForTaskInfo(dataflowTaskInfo, taskInfo);
        taskInfoMap.put(dataflowTaskInfo.getId(), taskInfo);

        taskRecordQueue.add(taskRecord);
    }

    private void setInterceptorForTaskInfo(DataflowTaskInfo taskInfoEntity, TaskInfo taskInfo) {
        taskInfo.setTaskLaunchInterceptorBeanName(taskInfoEntity.getLaunchInterceptor());
        taskInfo.setAfterReadInterceptorBeanName(taskInfoEntity.getAfterReadInterceptor());
        taskInfo.setAfterWriteInterceptorBeanName(taskInfoEntity.getAfterWriteInterceptor());
    }

    private void copyJdbcDatasourceForTaskParam(DataflowTaskInfo taskInfoEntity, TaskParam taskParam) {
        DataflowDatasource srcDatasource = datasourceMapper.selectById(taskInfoEntity.getSrcDsId());
        if (srcDatasource == null) {
            throw new ServiceException(Code.SRC_DATASOURCE_NOT_EXISTS);
        }
        DataflowDatasource dstDatasource = datasourceMapper.selectById(taskInfoEntity.getDstDsId());
        if (dstDatasource == null) {
            throw new ServiceException(Code.DST_DATASOURCE_NOT_EXISTS);
        }
        JdbcProperties srcJdbcProperties = new JdbcProperties();
        JdbcProperties dstJdbcProperties = new JdbcProperties();
        BeanUtil.copyProperties(srcDatasource, srcJdbcProperties);
        BeanUtil.copyProperties(dstDatasource, dstJdbcProperties);
        srcJdbcProperties.setInstanceName(srcDatasource.getSchemaName());
        dstJdbcProperties.setInstanceName(dstDatasource.getSchemaName());
        taskParam.setSrc(srcJdbcProperties);
        taskParam.setDst(dstJdbcProperties);
    }
}
