package com.intct.flink.project.sql;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author gufg
 * @since 2025-10-07 07:40
 * Test data (run against MySQL to exercise the pipeline):
 * INSERT INTO `m1`.`test01` (`id`, `name`, `update_time`, `price`) VALUES (19, '李四', 1111111, 1000.00000);
 *
 * update `m1`.`test01` set price = 500 where id = 18
 *
 * INSERT INTO `m1`.`test01` (`id`, `name`, `update_time`, `price`) VALUES (20, '王五', 22222, 1000.00000);
 *
 * delete from  `m1`.`test01` where id = 19;
 *
 * update `m1`.`test01` set price = 100 where id = 18
 *
 * INSERT INTO `m1`.`test01` (`id`, `name`, `update_time`, `price`) VALUES (21, '赵六', 33333333, 1000.00000);
 */
public class CdcToKafkaTestApp {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/test/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        tenv.executeSql(
                "CREATE TABLE order_mysql (    " +
                        " id            INT ,             " +
                        " name          STRING ,          " +
                        " price         DECIMAL(10, 5),   " +
                        " create_time   timestamp(3)  ,      " +
                        " update_time   bigint  ,      " +
                        "     PRIMARY KEY (id) NOT ENFORCED         " +
                        "     ) WITH (                              " +
                        "     'connector' = 'mysql-cdc',            " +
                        "     'hostname' = 'cdh-node'   ,            " +
                        "     'port' = '13306'          ,            " +
                        "     'username' = 'root'      ,            " +
                        "     'password' = 'Test_090110'      ,            " +
                        "     'scan.startup.mode' = 'latest-offset', " +
                        "     'database-name' = 'm1' ,            " +
                        "     'table-name' = 'test01'            " +
                        ")"
        );

        // 4 映射kafka
        tenv.executeSql(
                "CREATE TABLE sink_kakfa (\n" +
                        " id            INT ,             " +
                        " name          STRING ,          " +
                        " price         DECIMAL(10, 5),   " +
                        " create_time   timestamp(3)  ,      " +
                        " update_time   bigint  ,      " +
                        " PRIMARY KEY(id) NOT ENFORCED\n" +
                        ") WITH (\n" +
                        "  'connector' = 'upsert-kafka',\n" +
                        "  'topic' = 'sql_topic',\n" +
                        "  'properties.bootstrap.servers' = 'cdh-node:9092',\n" +
                        "  'key.format' = 'json',\n" +
                        "  'value.format' = 'json'\n" +
                        ")"
        );

        // 5 sink kakfak
        tenv.executeSql("insert into sink_kakfa select * from order_mysql");


    }
}