package com.ot.flink.cdc.function;

import com.ot.flink.cdc.biz.domain.CdcTask;
import com.ot.flink.cdc.common.TaskStatusEnum;
import com.ot.flink.cdc.config.CdcJdbcConfig;
import com.ot.flink.cdc.function.mapper.MapperFactory;
import com.ot.flink.cdc.function.sink.SinkFactory;
import com.ot.flink.cdc.function.source.CdcSourceFactory;
import com.ot.flink.cdc.util.EnvUtil;
import com.ot.flink.cdc.util.SpringUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Holder for the Flink CDC job lifecycle: builds the stream environment,
 * starts/stops the CDC pipeline, and tracks task state in {@link #tasks}.
 *
 * @author: admin
 * @date: 2024/5/24
 * @since JDK1.8 version: 1.0.0
 */
@Component
public class CdcHolder {

    /** Logger. Fixed: was mistakenly bound to {@code CdcRunner.class}; also made final. */
    private static final Logger logger = LoggerFactory.getLogger(CdcHolder.class);

    /**
     * Task registry keyed by task name. Insertion order is preserved
     * (LinkedHashMap) and single operations are thread-safe via the
     * synchronized wrapper; iteration still requires external synchronization.
     */
    public static final Map<String, CdcTask> tasks = Collections.synchronizedMap(new LinkedHashMap<>());

    /** Flink stream environment, created by {@link #initEvn()}. */
    private StreamExecutionEnvironment env;

    /** Handle of the asynchronously submitted job; null until {@link #start()} succeeds. */
    private JobClient jobClient;

    /**
     * Initializes the Flink execution environment: parallelism, checkpointing,
     * restart strategy, and state backend.
     *
     * <p>NOTE(review): the method name keeps the historical typo ("Evn") to stay
     * backward-compatible with existing callers.
     */
    public void initEvn() {
        logger.info("初始化Flink Env");
        env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.disableOperatorChaining();
        // NOTE(review): 30 ms here is almost certainly a typo, but it is harmless —
        // it is overridden by setCheckpointInterval(10 min) below.
        env.enableCheckpointing(30L);
        // Checkpoint timeout: 60 minutes.
        env.getCheckpointConfig().setCheckpointTimeout(Duration.ofMinutes(60).toMillis());
        // Exactly-once checkpointing semantics.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Retain the last checkpoint when the job is cancelled.
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // Tolerate checkpoint failures to avoid failing during long snapshot scans.
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(100);
        env.getCheckpointConfig().setCheckpointInterval(Duration.ofMinutes(10).toMillis());
        // Restart indefinitely with a 2 s delay between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 2000L));
        // State backend: in-memory hash map.
        env.setStateBackend(new HashMapStateBackend());

        // TODO(review): hard-coded Windows-local checkpoint path — move to configuration
        // (e.g. CdcJdbcConfig or an application property) before deploying elsewhere.
        env.getCheckpointConfig().setCheckpointStorage(
                new FileSystemCheckpointStorage("file:///D:/CODE/my/pg-wal-listen/checkpoints/"));
        EnvUtil.initEnv(env);
    }

    /**
     * Starts the CDC pipeline asynchronously and records the resulting task
     * state (RUNNING on success, STOP_ERROR with the exception message on
     * failure) in {@link #tasks} under the name "main".
     */
    public void start() {
        logger.info("flink cdc 开始运行");
        CdcTask taskInfo = new CdcTask();
        taskInfo.setName("main");
        try {
            startFlinkEngine();
            taskInfo.setJobId(this.jobClient.getJobID().toString());
            taskInfo.setStatus(TaskStatusEnum.RUNNING.getCode());
            taskInfo.setStatusDesc(TaskStatusEnum.RUNNING.getDesc());
            taskInfo.setJobClient(this.jobClient);
        } catch (Exception e) {
            // Fixed: log with a descriptive message instead of an empty string.
            logger.error("flink cdc 启动失败", e);
            taskInfo.setStatus(TaskStatusEnum.STOP_ERROR.getCode());
            taskInfo.setStatusDesc(TaskStatusEnum.STOP_ERROR.getDesc());
            taskInfo.setMark(e.getMessage());
        }
        tasks.put(taskInfo.getName(), taskInfo);
    }

    /**
     * Builds the source→map→sink pipeline based on the configured JDBC type
     * ("mysql" or "postgres") and submits the job asynchronously, storing the
     * handle in {@link #jobClient}. Returns without submitting (after logging
     * an error) when the JDBC type is unsupported.
     *
     * @throws Exception if source creation or job submission fails
     */
    private void startFlinkEngine() throws Exception {

        CdcJdbcConfig jdbcConfig = SpringUtil.getBean(CdcJdbcConfig.class);
        final String jdbcType = jdbcConfig.getType();
        DataStreamSource<String> streamSource = null;
        if (StringUtils.endsWithIgnoreCase(jdbcType, "mysql")) {
            streamSource = env.fromSource(CdcSourceFactory.createMysqlSource(), WatermarkStrategy.forMonotonousTimestamps(), "MysqlSource");
        } else if (StringUtils.endsWithIgnoreCase(jdbcType, "postgres")) {
            streamSource = env.addSource(CdcSourceFactory.createPostgresSource());
        } else {
            logger.error("不支持的数据库类型：{}", jdbcType);
            return;
        }
        streamSource.map(MapperFactory.getInstance()).addSink(SinkFactory.getInstance2());
        // Submit without blocking the caller; the handle allows later cancellation.
        this.jobClient = env.executeAsync();
    }

    /**
     * Cancels the running job, if any. Safe to call when nothing was started.
     */
    public void close() {
        if (null != this.jobClient) {
            this.jobClient.cancel();
        }
        logger.info("flink cdc 取消运行");
    }

    /** @return the job handle, or null if the job was never started. */
    public JobClient getJobClient() {
        return jobClient;
    }

}
