package com.bw.data_conversion.kafka;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import net.bwie.realtime.jtp.KafkaUtil;
import net.bwie.realtime.jtp.MysqlCdcUtil;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class DWDcars_log {

    /**
     * Shared, thread-safe JSON mapper. Declared {@code static final} so it is created once
     * and is not captured from main's locals into the serialized operator closure.
     */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    /**
     * Flink job: reads raw MySQL CDC change records (Debezium-style JSON) for the
     * {@code sca.cars_Log} table, extracts the {@code after} image of each change,
     * and publishes it to the Kafka topic {@code cars_Log}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Checkpoint every 5s with exactly-once semantics; a checkpoint times out after 60s.
        env.enableCheckpointing(5000);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        // CDC source emitting one JSON string per change record of sca.cars_Log.
        DataStream<String> carsLog = MysqlCdcUtil.cdcMysqlRaw(env, "sca", "cars_Log");

        // Extract the "after" image from each change record. Fall back to "{}" when the
        // record is malformed JSON, has no "after" field, or "after" is JSON null
        // (Debezium delete events) so downstream consumers always receive a JSON object.
        DataStream<String> carsAfterData = carsLog.map(json -> {
            try {
                JsonNode afterNode = OBJECT_MAPPER.readTree(json).get("after");
                // get("after") returns a NullNode (not Java null) for "after": null, so
                // both checks are required; the original emitted the literal string "null"
                // for delete events.
                // NOTE(review): this drops the deleted row's key entirely — confirm
                // downstream does not need delete semantics.
                return afterNode != null && !afterNode.isNull() ? afterNode.toString() : "{}";
            } catch (Exception e) {
                // Best-effort: keep the pipeline alive on malformed input rather than
                // failing the job; the record degrades to an empty JSON object.
                return "{}";
            }
        });

        // Publish the extracted "after" payloads to the Kafka topic `cars_Log`.
        KafkaUtil.producerKafka(carsAfterData, "cars_Log");

        env.execute("DWDcars_log");
    }
}
