package com.sl.cdc.core.config;

/*
 * Copyright [2022] [https://www.xiaonuo.vip]
 *
 * Snowy采用APACHE LICENSE 2.0开源协议，您在使用过程中，需要注意以下几点：
 *
 * 1.请不要删除和修改根目录下的LICENSE文件。
 * 2.请不要删除和修改Snowy源码头部的版权声明。
 * 3.本项目代码可免费商业使用，商业使用请保留源码和相关描述文件的项目出处，作者声明等。
 * 4.分发源码时候，请注明软件出处 https://www.xiaonuo.vip
 * 5.不可二次分发开源参与同类竞品，如有想法可联系团队xiaonuobase@qq.com商议合作。
 * 6.若您的项目无法满足以上几点，需要更多功能代码，获取Snowy商业授权许可，请在官网购买授权，地址为 https://www.xiaonuo.vip
 */


import cn.hutool.core.collection.CollUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.sl.cdc.api.CdcLoaderApi;
import com.sl.cdc.api.domain.TaskInfo;
import com.sl.cdc.modular.debezium.CdcDebeziumManager;
import io.debezium.engine.ChangeEvent;
import io.debezium.engine.DebeziumEngine;
import io.debezium.engine.format.Json;
import lombok.RequiredArgsConstructor;
import org.apache.commons.compress.utils.Lists;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.annotation.Order;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * 业务相关配置
 *
 **/
@Configuration
@RequiredArgsConstructor
/**
 * 业务相关配置 (business configuration): wires Debezium CDC tasks into the Spring Boot
 * startup sequence.
 *
 * <p>After the application context is ready, every registered {@link CdcLoaderApi}
 * is asked for its {@link TaskInfo} list; the collected tasks are registered with
 * the shared {@link CdcDebeziumManager}, which is then started on a background
 * thread so application boot is not blocked.
 **/
@Configuration
@RequiredArgsConstructor
public class DebeziumCdcConfigure {

    // All CDC task loaders discovered in the context. @Lazy defers resolution to
    // avoid startup-order cycles between this configuration and the loader beans.
    @Autowired
    @Lazy
    private List<CdcLoaderApi> cdcLoaderApiList;

    /**
     * Singleton manager that owns Debezium task registration and engine startup.
     *
     * @return the shared {@link CdcDebeziumManager} bean
     */
    @Bean
    public CdcDebeziumManager cdcDebeziumManager() {
        return new CdcDebeziumManager();
    }

    /**
     * Runs last among {@link ApplicationRunner}s ({@code @Order(Integer.MAX_VALUE)})
     * and kicks off all loaded CDC tasks asynchronously.
     *
     * @return a runner that collects tasks from every loader and starts the manager
     */
    @Bean
    @Order(Integer.MAX_VALUE)
    public ApplicationRunner cdcStart() {
        // @Configuration classes are CGLIB-proxied by default, so this call returns
        // the singleton bean, not a second instance.
        CdcDebeziumManager cdcDebeziumManager = cdcDebeziumManager();

        return args -> CompletableFuture.runAsync(() -> {
            if (CollUtil.isEmpty(cdcLoaderApiList)) {
                return;
            }
            List<TaskInfo> loadList = new ArrayList<>();
            for (CdcLoaderApi cdcLoaderApi : cdcLoaderApiList) {
                List<TaskInfo> load = cdcLoaderApi.load();
                // Guard against loaders returning null/empty (original NPE'd on null).
                if (CollUtil.isNotEmpty(load)) {
                    loadList.addAll(load);
                }
            }
            if (CollUtil.isEmpty(loadList)) {
                return;
            }
            for (TaskInfo taskInfo : loadList) {
                cdcDebeziumManager.addTask(taskInfo);
            }
            cdcDebeziumManager.start();
        }).exceptionally(ex -> {
            // The original runAsync(..) silently swallowed failures, leaving a broken
            // CDC bootstrap invisible. Surface the error. TODO(review): route through
            // the project's logging facade instead of stderr.
            System.err.println("CDC bootstrap failed: " + ex);
            ex.printStackTrace();
            return null;
        });
    }

    /**
     * Ad-hoc manual test harness for a standalone Debezium MySQL engine.
     *
     * <p>SECURITY NOTE(review): this method embeds a real-looking host, user and
     * password. Hard-coded credentials must not remain in source control — move
     * them to external configuration before release.
     */
    public static void main(String[] args) {

        String property = System.getProperty("user.home");

        // Base configuration for the Debezium Engine with the MySQL connector.
        final Properties props = new Properties();
        props.setProperty("name", "engine");
        props.setProperty("connector.class", "io.debezium.connector.mysql.MySqlConnector");
        props.setProperty("offset.storage", "org.apache.kafka.connect.storage.FileOffsetBackingStore");
        // Offsets let a restarted connector resume from the last recorded position.
        props.setProperty("offset.storage.file.filename", property + "/square_lake/tmp/offsets.dat");
        props.setProperty("offset.flush.interval.ms", "60000");
        props.setProperty("serverTimezone", "Asia/Shanghai");
        // Connector (database) properties. FIXME: externalize credentials.
        props.setProperty("database.hostname", "101.35.209.150");
        props.setProperty("database.port", "3306");
        props.setProperty("database.user", "root");
        props.setProperty("database.password", "Baiyoujie@123");
        props.setProperty("database.server.id", "85744");
        props.setProperty("topic.prefix", "my-app-connector");
        props.setProperty("schema.history.internal",
                "io.debezium.storage.file.history.FileSchemaHistory");
        props.setProperty("schema.history.internal.file.filename",
                property + "/square_lake/tmp/schemahistory.dat");

        // Sample JSON config as produced by the task-info loader; overrides the
        // defaults above. Unknown keys ("sdf", "dfdf", ...) are ignored by Debezium.
        String str = "{\"connector.class\":\"io.debezium.connector.mysql.MySqlConnector\",\"database.user\":\"root\",\"offset.storage\":\"org.apache.kafka.connect.storage.FileOffsetBackingStore\",\"database.server.id\":\"57O757uf5pWw5o2u5rqQ\",\"dfdf\":\"sdfdsf\",\"database.port\":\"3306\",\"sdfsdf\":\"dgdfgsdf\",\"offset.flush.interval.ms\":\"60000\",\"schema.history.internal.file.filename\":\"/Users/idea/square_lake/tmp/系统数据源_101.35.209.150-to-系统数据源_101.35.209.150History.dat\",\"topic.prefix\":\"square-lake-sdf-connector\",\"schema.history.internal\":\"io.debezium.storage.file.history.FileSchemaHistory\",\"offset.storage.file.filename\":\"/Users/idea/square_lake/tmp/系统数据源_101.35.209.150-to-系统数据源_101.35.209.150Offsets.dat\",\"sdf\":\"gg\",\"database.hostname\":\"101.35.209.150\",\"database.password\":\"Baiyoujie@123\",\"name\":\"sdf\",\"serverTimezone\":\"Asia/Shanghai\"}";

        JSONObject entries = JSONUtil.parseObj(str);
        props.putAll(entries);

        // Build the engine with this configuration; change events arrive as JSON.
        DebeziumEngine<ChangeEvent<String, String>> engine = DebeziumEngine.create(Json.class)
                .using(props)
                .notifying(record -> {
                    System.out.println(record);
                }).build();

        // Run the engine asynchronously on its own worker thread.
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.execute(engine);

        // Close the engine and worker on JVM exit so offsets/history flush cleanly
        // (original leaked both: engine was never closed, executor never shut down).
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            try {
                engine.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            executor.shutdown();
        }));

        // Do something else or wait for a signal or an event
    }
}
