package com.example;

import com.alibaba.fastjson.JSONObject;
import com.example.domain.UserEvent;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * Flink streaming job that consumes JSON-encoded user events from the Kafka
 * topic {@code demo}, parses them into {@code UserEvent}s, and prints a
 * continuously updated per-user running total of {@code amount}.
 *
 * @author Ann
 * @version v1.0
 * @date Created in 2025/5/13
 **/
@Slf4j
public class FlinkKafkaCDCApp {

    /**
     * Entry point: builds and runs the streaming pipeline.
     *
     * <p>Pipeline: Kafka topic {@code demo} (plain JSON strings) → parse into
     * {@link UserEvent} → key by {@code userId} → rolling {@code reduce} summing
     * {@code amount} → print to stdout. Because no window is applied, the reduce
     * emits an updated aggregate for every incoming event.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Kafka consumer configuration.
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");
        props.setProperty("group.id", "flink-cdc-group");
        // Start from the earliest offset when the group has no committed offset.
        props.setProperty("auto.offset.reset", "earliest");

        // 3. Create the Kafka source.
        // NOTE(review): FlinkKafkaConsumer is deprecated in recent Flink releases
        // in favor of KafkaSource — consider migrating when upgrading Flink.
        FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(
                "demo",                    // Kafka topic
                new SimpleStringSchema(),  // deserialize each record as a UTF-8 string
                props
        );

        // 4. Attach the source to the environment.
        DataStreamSource<String> stream = env.addSource(kafkaConsumer);

        // 5. Parse JSON and maintain a running per-user sum of `amount`.
        //    Expected payload fields: userId (string), amount (int), timestamp (long)
        //    — TODO confirm against the producer's schema.
        stream.map(json -> {
                    JSONObject obj = JSONObject.parseObject(json);
                    return new UserEvent(
                            obj.getString("userId"),
                            obj.getInteger("amount"),
                            obj.getLong("timestamp")
                    );
                })
                .keyBy(event -> event.userId)
                // The aggregate keeps the FIRST seen timestamp for each key while
                // summing amounts across all events for that key.
                .reduce((item1, item2) -> new UserEvent(item1.userId, item1.amount + item2.amount, item1.timestamp))
                .print();

        // 6. Submit and run the job.
        env.execute("Flink CDC Kafka Java 8 Demo");
    }
}
