package org.lk.common;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.StrUtil;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.DebeziumSourceFunction;
import lombok.Data;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.lk.common.factory.DsCreateFactory;
import org.lk.common.factory.StreamReader;
import org.lk.common.log.CdcFileAppender;
import org.lk.common.log.CdcJobContext;
import org.lk.common.writer.StreamSinkFunction;
import org.lk.config.SpringUtil;
import org.lk.entity.CdcJob;
import org.lk.entity.CdcJobTable;
import org.lk.entity.CdcLog;
import org.lk.entity.DataSourceEntity;
import org.lk.mapper.CdcJobMapper;
import org.lk.mapper.CdcLogMapper;
import org.lk.mapper.DataSourceEntityMapper;
import org.lk.util.ResponseResult;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.io.File;
import java.io.FileFilter;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

/**
 * Manages the lifecycle of Flink CDC streaming jobs: starting a job (with
 * checkpoint/savepoint recovery when available) and stopping it with a savepoint.
 *
 * @author lk
 */
@Component
@Data
public class FlinkCdcStreamManager {

    /** Running jobs keyed by job name, so {@link #stop(String)} can find and cancel them. */
    private Map<String, JobClient> map = new ConcurrentHashMap<>();
    /** Tables to capture when the job is not a whole-database sync. */
    private List<CdcJobTable> cdcJobTableList;
    /** The job definition currently being started; must be set before {@link #start()}. */
    private CdcJob cdcJob;

    @Value("${flink.savepointDirectory:savepoints}")
    String savepointDirectory;

    @Value("${flink.savepointPrefix}")
    String savepointPrefix;

    @Value("${flink.checkpointDirectory}")
    String checkpointDirectory;

    private String jobId;

    @Resource
    private DataSourceEntityMapper dataSourceEntityMapper;
    @Resource
    private CdcLogMapper cdcLogMapper;

    /**
     * Starts the configured CDC job. Failures are not propagated; instead they are
     * recorded in the cdc_log table with handler code 500.
     */
    public void start() {
        CdcLog cdcLog = new CdcLog();
        try {
            initLog(cdcLog);
            startJob(cdcLog);
        } catch (Exception e) {
            cdcLog.setHandlerCode(500);
            // getMessage() may be null (e.g. for NullPointerException); fall back to
            // toString() so the failure cause is never lost in the log record.
            cdcLog.setHandlerMsg(e.getMessage() != null ? e.getMessage() : e.toString());
            cdcLogMapper.insert(cdcLog);
        }
    }

    /**
     * Builds and launches the Flink CDC pipeline for {@link #cdcJob}, resuming from a
     * previous savepoint/checkpoint when one exists, and registers the resulting
     * {@link JobClient} in {@link #map}.
     *
     * @param cdcLog log record passed through to the sink for progress reporting
     * @throws Exception if the Flink environment cannot be built or the job fails to submit
     */
    private void startJob(CdcLog cdcLog) throws Exception {
        DataSourceEntity originDataSourceEntity = dataSourceEntityMapper.selectById(cdcJob.getOriginDsId());
        DataSourceEntity targetDataSourceEntity = dataSourceEntityMapper.selectById(cdcJob.getTargetDsId());
        // Whole-database sync uses "*"; otherwise a comma-separated explicit table list.
        String tableAll = cdcJob.getHasAllTable() ? "*" : cdcJobTableList.stream().map(CdcJobTable::getTableName).collect(Collectors.joining(","));
        StreamReader streamReader = DsCreateFactory.get(originDataSourceEntity);
        // Build the Debezium CDC source; StartupOptions.initial() = snapshot then binlog.
        DebeziumSourceFunction<String> debeziumSourceFunction = streamReader.readerSourceCdc(Arrays.asList(tableAll.split(",")), StartupOptions.initial());
        Configuration configuration = new Configuration();

        // Resume from an existing savepoint/checkpoint for this job, if any.
        String checkOrSavePointFilePath = getCheckOrSavePointFilePath();
        if (StrUtil.isNotEmpty(checkOrSavePointFilePath)) {
            configuration.setString("execution.savepoint.path", checkOrSavePointFilePath);
        }
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.enableCheckpointing(10000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.setStateBackend(new EmbeddedRocksDBStateBackend());
        env.getCheckpointConfig().setCheckpointStorage(new FileSystemCheckpointStorage("file:///" + checkpointDirectory));
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        env.getCheckpointConfig().setCheckpointTimeout(10000);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Keep externalized checkpoints after cancellation so jobs can be resumed later.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        env.addSource(debeziumSourceFunction).setParallelism(1).addSink(new StreamSinkFunction(cdcJob, targetDataSourceEntity, cdcLog)).setParallelism(1);
        // Submit the job asynchronously and persist the Flink-assigned job id.
        JobClient jobClient = env.executeAsync(cdcJob.getJobName());
        String jobId = jobClient.getJobID().toString();
        CdcJobMapper cdcJobMapper = SpringUtil.getBean(CdcJobMapper.class);
        cdcJob.setJobId(jobId);
        cdcJobMapper.updateById(cdcJob);
        // Remember the client under the job name so this job can be stopped later.
        map.put(cdcJob.getJobName(), jobClient);
    }

    /**
     * Resolves the path to resume from: a savepoint if one matching this job exists,
     * otherwise the latest retained checkpoint, otherwise {@code null} (fresh start).
     *
     * @return an absolute restore path, or {@code null} when no prior state exists
     */
    private String getCheckOrSavePointFilePath() {
        String filePath = null;
        String jobJobId = cdcJob.getJobId();
        if (StrUtil.isNotEmpty(jobJobId)) {
            String path = checkpointDirectory + FileUtil.FILE_SEPARATOR + jobJobId;
            if (FileUtil.exist(path)) {
                filePath = getFilePath(path);
            }
            path = savepointDirectory + FileUtil.FILE_SEPARATOR;
            if (FileUtil.exist(path)) {
                // Savepoint directories are named "<savepointPrefix><first 6 chars of jobId>-...".
                String savePointPath = getSavePointPath(path, jobJobId.substring(0, 6));
                if (savePointPath != null) {
                    // Prefer the savepoint, but do NOT discard an already-found checkpoint
                    // path when no matching savepoint exists.
                    filePath = savePointPath;
                }
            }
        }
        return filePath;
    }

    /**
     * Inserts the initial log row (so its generated id is available) and binds a
     * per-run log file to the job context.
     *
     * @param cdcLog empty log record to populate and persist
     */
    private void initLog(CdcLog cdcLog) {
        Date triggerTime = new Date();
        cdcLog.setTriggerTime(triggerTime);
        cdcLog.setJobId(cdcJob.getId());
        cdcLog.setJobName(cdcJob.getJobName());
        // Insert first so the database-generated id can be used in the log file name.
        cdcLogMapper.insert(cdcLog);
        String makeLogFileName = CdcFileAppender.makeLogFileName(triggerTime, cdcLog.getId());
        CdcJobContext.setXxlJobContext(new CdcJobContext(makeLogFileName));
    }

    /**
     * Stops the named running job, writing a savepoint to {@link #savepointDirectory}.
     * stopWithSavepoint(false, ...) means: do not terminate without a savepoint —
     * the savepoint is guaranteed to be created before the job stops.
     *
     * @param jobName the name the job was registered under in {@link #start()}
     * @return success, or a failure result when the job is unknown or the stop fails
     */
    public ResponseResult stop(String jobName) {
        JobClient jobClient = map.get(jobName);
        if (jobClient == null) {
            // Previously this fell through to an NPE with a null message; fail explicitly.
            return ResponseResult.fail("No running job found with name: " + jobName);
        }
        try {
            jobClient.stopWithSavepoint(false, savepointDirectory);
        } catch (Exception e) {
            return ResponseResult.fail(e.getMessage() != null ? e.getMessage() : e.toString());
        }
        return ResponseResult.success();
    }

    /**
     * Returns the first "chk-*" checkpoint directory under {@code path}, or
     * {@code null} when none exists or {@code path} is not a readable directory.
     */
    private String getFilePath(String path) {
        File filePath = new File(path);
        File[] files = filePath.listFiles(pathname -> pathname.getName().toLowerCase().startsWith("chk-"));
        // listFiles returns null when path is not a directory or an I/O error occurs.
        return files != null && files.length > 0 ? files[0].getPath() : null;
    }

    /**
     * Finds the savepoint directory whose name starts with
     * {@code savepointPrefix + cdcJobIdPreFix + "-"}.
     *
     * @param path            savepoint root directory (with trailing separator)
     * @param cdcJobIdPreFix  first 6 characters of the Flink job id
     * @return the matching savepoint path, or {@code null} when none is found
     */
    private String getSavePointPath(String path, String cdcJobIdPreFix) {
        File filePath = new File(path);
        File[] files = filePath.listFiles();
        // listFiles returns null when path is not a directory or an I/O error occurs.
        if (files == null) {
            return null;
        }
        for (File file : files) {
            if (file.getName().startsWith(savepointPrefix + cdcJobIdPreFix + "-")) {
                return file.getPath();
            }
        }
        return null;
    }

    /**
     * Manual scratch test for the savepoint-lookup naming convention; exercises the
     * local file system only (expects an "E://savepoint" directory on Windows).
     */
    public static void main(String[] args) {
        String jobJobId = "dfef9eb4382975071c76ceeefd67ecfd";
        String prefix = jobJobId.substring(0, 6);
        String str = "E://savepoint" + FileUtil.FILE_SEPARATOR;
        File filePath = new File(str);
        File[] files = filePath.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            if (file.getName().startsWith("savepoint-" + prefix + "-")) {
                System.out.println("aa" + file.getPath());
            }
        }
    }

}
