package com.bw.data_conversion.kafka;


import com.fasterxml.jackson.databind.JsonNode;
import net.bwie.realtime.jtp.KafkaUtil;
import net.bwie.realtime.jtp.MysqlCdcUtil;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * DWD-layer Flink job: consumes CDC change events for the
 * {@code sca.entrance_guard_log} MySQL table, extracts the "after" image of
 * each change, and forwards it as a JSON string to the Kafka topic
 * {@code entrance_guard_log}.
 */
public class DWDentrance_guard_log {

    // Shared, thread-safe JSON parser. Held in a static field so it is looked
    // up on the task-manager JVM instead of being serialized into the Flink
    // closure along with the map function.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        // Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Checkpointing: every 5s, exactly-once semantics, 60s timeout.
        env.enableCheckpointing(5000);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        // CDC source emitting raw Debezium-style JSON change events.
        DataStream<String> entranceGuardLog =
                MysqlCdcUtil.cdcMysqlRaw(env, "sca", "entrance_guard_log");

        /*
         Sample change event — only the "after" image is kept downstream:
         {"before":null,
          "after":{"id":"cffb75c9-...","openMode":4,"openTime":"2025-08-21 11:47:38.719","ownerId":10004866},
          "source":{"version":"1.9.7.Final","connector":"mysql", ... ,"db":"sca","table":"entrance_guard_log", ...},
          "op":"c","ts_ms":1755768498436,"transaction":null}
         */
        DataStream<String> entranceGuardAfterData =
                entranceGuardLog.map(DWDentrance_guard_log::extractAfter);

        // Sink the extracted "after" images to Kafka.
        KafkaUtil.producerKafka(entranceGuardAfterData, "entrance_guard_log");

        env.execute("DWDentrance_guard_log");
    }

    /**
     * Extracts the "after" image from a CDC change-event JSON string.
     *
     * @param json raw CDC event JSON
     * @return the "after" object serialized as JSON, or {@code "{}"} when the
     *         field is absent, JSON null (e.g. delete events), or the input
     *         cannot be parsed
     */
    private static String extractAfter(String json) {
        try {
            JsonNode afterNode = OBJECT_MAPPER.readTree(json).get("after");
            // get() returns a NullNode (not Java null) for "after":null on
            // delete events — a bare != null check would emit the literal
            // string "null" to Kafka, so isNull() must be checked as well.
            return (afterNode == null || afterNode.isNull()) ? "{}" : afterNode.toString();
        } catch (Exception e) {
            // Malformed event: emit an empty object rather than failing the job.
            return "{}";
        }
    }
}



