package net.xuele.learn.flink.cdc;


import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demo Flink job that captures MySQL changes (snapshot + binlog) from the
 * {@code casb.authConfig} table via Flink CDC and writes each change event,
 * serialized as a JSON string, to a local text file.
 *
 * @author patrick
 * @since 2023/7/10 13:41
 */
public class Test {

    /**
     * Builds and runs the CDC pipeline: MySQL source (snapshot + binlog) ->
     * text-file sink. Blocks until the Flink job terminates.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // NOTE(review): connection settings and credentials are hard-coded for
        // this demo; load them from configuration/environment in real deployments.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("localhost")
                .port(3306)
                .databaseList("casb") // database(s) to capture; to sync a whole database, set tableList to ".*"
                .tableList("casb.authConfig") // table(s) to capture
                .username("root")
                .password("123456")
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts each SourceRecord to a JSON string
                .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpoint every 3 seconds so the CDC source can commit binlog offsets
        // and resume from the last consistent position after a failure.
        env.enableCheckpointing(3000);

        env
                .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source")
                // set source parallelism to 4
                .setParallelism(4)
                // NOTE(review): writeAsText is deprecated in recent Flink releases;
                // prefer sinkTo(FileSink.forRowFormat(...)) when upgrading.
                .writeAsText("output.txt")
                .setParallelism(1); // sink parallelism of 1 so all records land in a single file

        env.execute("Print MySQL Snapshot + Binlog");
    }
}
