package com.intct.flink.cdc.stream;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL + MySQL CDC demo: registers a temporary table backed by the
 * {@code mysql-cdc} connector, runs a continuous {@code SELECT} over it and
 * prints every change event to stdout until the job is cancelled.
 *
 * @author gufg
 * @since 2025-07-16 11:12
 */
public class CDCSQLTest {
    public static void main(String[] args) throws Exception {
        // Streaming environment plus its SQL/Table API wrapper.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Flink SQL needs checkpointing enabled to surface real-time changes.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // Temporary source table over the MySQL CDC connector.
        // NOTE(review): credentials are hard-coded — move them to external
        // configuration before using this outside of a local test.
        String sourceDdl = "create table flink_test01("
                + " id int"
                + ", name string"
                + ", age int"
                + ", PRIMARY KEY(id) NOT ENFORCED"
                + ")"
                + " with ("
                + "  'connector' = 'mysql-cdc',"
                + "  'hostname' = 'cdh-node',"
                + "  'port' = '3306',"
                + "  'username' = 'root',"
                + "  'password' = 'Test_090110',"
                + "  'database-name' = 'm1',"
                + "  'table-name' = 'test01',"
                + "  'scan.startup.mode' = 'initial')";
        tableEnv.executeSql(sourceDdl);

        // Continuous query over the CDC source; print() blocks and keeps
        // streaming results until the job is stopped.
        Table result = tableEnv.sqlQuery("select * from flink_test01");
        result.execute().print();

        // Example sink (disabled): an upsert-kafka temporary table plus two
        // alternative ways of writing the query result out to it.
//        tableEnv.executeSql("CREATE TABLE kafak_sink (" +
//                "  id int," +
//                "  name string," +
//                "  age int," +
//                "  PRIMARY KEY (id) NOT ENFORCED" +
//                ") WITH (" +
//                "  'connector' = 'upsert-kafka'," +
//                "  'topic' = 'my-tpoic'," +
//                "  'properties.bootstrap.servers' = 'cdh-node:9092'," +
//                "  'key.format' = 'json'," +
//                "  'value.format' = 'json'" +
//                ")");
//
//        // Option 1: Table API insert.
////        result.executeInsert("kafak_sink");
//
//        // Option 2: plain SQL — insert into kafak_sink select * from flink_test01
//        tableEnv.executeSql("insert into kafak_sink select * from flink_test01");

        // Exercise:
        //   1. Build a temporary table from a MySQL CDC source.
        //   2. Build a temporary table from a Kafka source.
        //   3. Join the MySQL CDC source with the Kafka source.
        //   4. Write the joined result back to Kafka.
    }
}
