package com.intct.sql;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author gufg
 * @since 2025-11-13 08:33
 */
public class MysqlToKafka {

    /**
     * Streams the MySQL table {@code m1.test01} (full snapshot + incremental
     * binlog changes) into the Kafka topic {@code c8603} as Debezium-JSON
     * changelog records, with exactly-once delivery.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails (propagated from
     *                   {@code TableResult#await()})
     */
    public static void main(String[] args) throws Exception {
        // 1. Initialize the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpointing is required for the exactly-once Kafka sink below.
        // NOTE(review): the Kafka transaction timeout (90s, see sink DDL) must
        // stay larger than this checkpoint interval (5s) or commits will fail.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // 2. Initialize the table environment (Flink 1.15+ style).
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 3. Register the MySQL CDC source table. The primary key is required
        // here so the connector can correctly emit UPDATE/DELETE changes.
        // Columns must match the physical schema of m1.test01.
        String createCdcSource = "CREATE TABLE mysql_t1_cdc (" +
                "  id STRING," +
                "  name STRING," +
                "  PRIMARY KEY (id) NOT ENFORCED" +
                ") WITH (" +
                "  'connector' = 'mysql-cdc'," +
                "  'hostname' = 'cdh-node'," +
                "  'port' = '13306'," +
                "  'username' = 'root'," +
                "  'password' = 'Test_090110'," + // TODO: externalize credentials (not hard-coded)
                "  'database-name' = 'm1'," +
                "  'table-name' = 'test01'," +
                "  'scan.startup.mode' = 'initial'" + // full snapshot first, then binlog
                ")";
        tableEnv.executeSql(createCdcSource);

        // 4. Register the Kafka sink table.
        // The plain 'kafka' connector does NOT allow a PRIMARY KEY constraint
        // (only 'upsert-kafka' does) — declaring one fails the DDL with a
        // ValidationException. INSERT/UPDATE/DELETE semantics are instead
        // carried inside the debezium-json payload (its 'op' field), which the
        // 'kafka' connector can write as a changelog. Also note that 'format'
        // and 'value.format' are mutually exclusive; only 'format' is set.
        String createKafkaSink = "CREATE TABLE kafka_c8601 (" +
                "  id STRING," +
                "  name STRING" +
                ") WITH (" +
                "  'connector' = 'kafka'," +
                "  'topic' = 'c8603'," +
                "  'properties.bootstrap.servers' = 'cdh-node:9092'," +
                "  'properties.transaction.timeout.ms' = '90000'," + // must exceed checkpoint interval
                "  'format' = 'debezium-json'," + // encodes the change type (op field)
                "  'sink.delivery-guarantee' = 'exactly-once'," +
                "  'sink.transactional-id-prefix' = 'cdc-c8603-id-'" + // required for exactly-once
                ")";
        tableEnv.executeSql(createKafkaSink);

        // 5. Sync data (CDC snapshot + incremental changes -> Kafka).
        // await() blocks the client until the job finishes/fails so that
        // submission or runtime errors are not silently dropped when main
        // returns. (For a detached deployment, drop the await().)
        tableEnv.executeSql("INSERT INTO kafka_c8601 SELECT id, name FROM mysql_t1_cdc")
                .await();
    }
}