package com.zhisheng.connectors.kafka;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Example Flink job that consumes Debezium CDC records (JSON) from Kafka,
 * unwraps the Debezium envelope, and prints the {@code after} image of each row.
 *
 * <p>Expected record shape: {@code {"payload": {"after": {...}, ...}, ...}}.
 * Delete events carry {@code "after": null} and are dropped by the filter below.
 */
public class FlinkKafkaConsumerExample {

    // ObjectMapper is expensive to construct and thread-safe for readTree();
    // share a single instance instead of allocating one per record.
    // A static field is re-initialized per TaskManager JVM, so it is safe to
    // reference from serialized Flink lambdas.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        // 1. Create the Flink execution environment.
        // NOTE(review): createLocalEnvironment() always runs embedded; use
        // getExecutionEnvironment() if this job should also run on a cluster.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();

        // 2. Configure the Kafka source.
        KafkaSource<String> source = KafkaSource.<String>builder()
            .setBootstrapServers("192.168.8.61:9092,192.168.8.61:9093,192.168.8.61:9094") // broker list
            .setTopics("mysql.test_sm.teacher")         // topic name
            .setGroupId("flink-consumer-group")   // consumer group
            .setStartingOffsets(OffsetsInitializer.earliest())
            .setValueOnlyDeserializer(new SimpleStringSchema()) // deserialize value as UTF-8 string
            .build();

        // 3. Attach the source (no event-time watermarks needed for this example).
        DataStream<String> kafkaStream = env.fromSource(
            source,
            WatermarkStrategy.noWatermarks(),
            "Kafka Source"
        );

        // 4. Parse each raw message into a JSON tree.
        DataStream<JsonNode> parsedStream = kafkaStream.map(OBJECT_MAPPER::readTree);

        // 5. Extract the business data from the Debezium envelope.
        //    "payload.after" holds the row image after the change; it is null
        //    for delete events and absent for non-enveloped messages, so guard
        //    against both instead of letting an NPE fail the job.
        DataStream<JsonNode> dataStream = parsedStream
            .map(node -> {
                JsonNode payload = node.get("payload");
                return payload == null ? null : payload.get("after");
            })
            .filter(after -> after != null && !after.isNull());

        // 6. Print (or plug in further processing here).
        dataStream.print();

        // 7. Submit the job.
        env.execute("Flink Kafka Consumer Job");
    }
}