package com.atguigu.gmall.realtime.demo;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author caodan
 * @version 1.0
 * @description Streaming data pipeline: MySQL -- Flink CDC -- Kafka -- ClickHouse
 * @date 2025-08-13 19:35
 */
public class FlinkCDC2mysql2kafka {

    /**
     * Entry point that wires up the whole demo pipeline with Flink SQL only:
     * a MySQL CDC source is mirrored into a Kafka topic via upsert-kafka, then
     * the same topic is consumed again and written into ClickHouse.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink environment cannot be created or a SQL job
     *                   fails to submit
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps this demo simple and deterministic.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        registerMysqlCdcSource(tableEnv);
        registerKafkaUpsertSink(tableEnv);

        // Submit the MySQL -> Kafka streaming job. Note: executeSql() on an
        // INSERT is asynchronous — it submits the job and returns immediately.
        tableEnv.executeSql("insert into\n" +
                "  user_kafka_sink\n" +
                "select\n" +
                "  *\n" +
                "from\n" +
                "  person");

        registerKafkaSource(tableEnv);
        registerClickHouseSink(tableEnv);

        // Submit the Kafka -> ClickHouse streaming job (a second, independent job).
        // (An alternative DataStream implementation using a hand-rolled JDBC sink
        // via ClickHouseUtil was removed here as dead commented-out code.)
        tableEnv.executeSql("insert into ch_sink select * from user_kafka_source");

    }

    /** Registers the MySQL CDC changelog source over table gmall.person. */
    private static void registerMysqlCdcSource(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE person (\n" +
                "  id INT,\n" +
                "  username STRING,\n" +
                "  age INT,\n" +
                "  PRIMARY KEY(id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'mysql-cdc',\n" +
                "  'hostname' = 'hadoop102',\n" +
                "  'port' = '3306',\n" +
                "  'username' = 'root',\n" +
                "  'password' = '000000',\n" +
                "  'database-name' = 'gmall',\n" +
                "  'table-name' = 'person'\n" +
                ")");
    }

    /** Registers the upsert-kafka sink that mirrors the CDC changelog into topic 'test'. */
    private static void registerKafkaUpsertSink(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE user_kafka_sink (\n" +
                "     id INT,\n" +
                "     username STRING,\n" +
                "     age INT,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'test',\n" +
                "  'properties.bootstrap.servers' = '192.168.10.102:9092',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")");
    }

    /**
     * Registers an append-only Kafka source over the same topic 'test'.
     *
     * <p>NOTE(review): this topic is written by upsert-kafka, so MySQL DELETEs
     * arrive as tombstones (records with a null value). The plain 'kafka'
     * connector with 'format' = 'json' cannot interpret those — confirm deletes
     * are either impossible upstream or handled (e.g. read with upsert-kafka or
     * set 'json.ignore-parse-errors').
     */
    private static void registerKafkaSource(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE user_kafka_source (\n" +
                "     id INT,\n" +
                "     username STRING,\n" +
                "     age INT\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'test',\n" +
                "  'properties.bootstrap.servers' = '192.168.10.102:9092',\n" +
                "  'properties.group.id' = 'testGroup',\n" +
                "  'scan.startup.mode' = 'group-offsets',\n" +
                "  'format' = 'json'\n" +
                ")");
    }

    /** Registers the ClickHouse connector sink targeting table default.person. */
    private static void registerClickHouseSink(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE ch_sink (\n" +
                "    id INT,\n" +
                "    username STRING,\n" +
                "    age INT,\n" +
                "    PRIMARY KEY (id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "    'connector' = 'clickhouse',\n" +
                "    'url' = 'clickhouse://192.168.10.103:8123',\n" +
                "    'database-name' = 'default',\n" +
                "    'table-name' = 'person',\n" +
                "    'sink.max-retries' = '3'\n" +
                ")");
    }
}
