package com.intct.ods;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.util.KafkaUtil;
import com.intct.util.MysqlUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author gufg
 * @since 2025-08-18 16:47
 * ODS data ingestion layer; the primary upstream is MySQL.
 * 1. Enable MySQL binlog for the {@code travel} and {@code travel_config} databases:
 *     edit my.cnf under /etc and add:
 *              binlog-do-db=travel
 *              binlog-do-db=travel_config
 * 2. Job outline
 *      2.1 Create the streaming environment
 *      2.2 Configure the data source (MySQL CDC)
 *      2.3 Read change events from the source
 *          2.3.1 Reshape each JSON string event
 *                and filter out null/unusable records
 *      2.4 Configure the sink (Kafka)
 *      2.5 Submit the job
 */
public class OdsManApp {
    /**
     * Entry point: builds and submits the ODS ingestion job
     * (MySQL CDC snapshot + binlog &rarr; JSON reshape &rarr; Kafka).
     *
     * Recognized arguments:
     *   --filePath          path to the connection properties file
     *                       (default: test.properties on the classpath)
     *   --kafka-topic-name  target Kafka topic (default: "ods-db1")
     *
     * @throws Exception on configuration errors or job submission failure
     */
    public static void main(String[] args) throws Exception {

        // Read command-line parameters.
        ParameterTool paraTools = ParameterTool.fromArgs(args);

        // Default properties file: test.properties on the classpath. Fail fast
        // with an explicit message when the resource is missing and no explicit
        // --filePath was given, instead of an opaque NullPointerException.
        java.net.URL defaultConfig = OdsManApp.class.getResource("/test.properties");
        if (defaultConfig == null && !paraTools.has("filePath")) {
            throw new IllegalStateException(
                    "test.properties not found on classpath and no --filePath argument given");
        }
        String filePath = paraTools.get("filePath",
                defaultConfig == null ? null : defaultConfig.getFile());

        // Topic is configurable again; the default preserves the previously
        // hard-coded value "ods-db1" so existing deployments are unaffected.
        String topic = paraTools.get("kafka-topic-name", "ods-db1");

        // Load MySQL/Kafka connection settings from the properties file.
        ParameterTool propertiesFile = ParameterTool.fromPropertiesFile(filePath);

        // 2.1 Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallel instance keeps binlog event order per table.
        env.setParallelism(1);

        // Checkpoint every 5s with exactly-once semantics.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // 2.2 Configure the MySQL CDC source: initial snapshot, then binlog.
        String[] tables = {"travel.area_info", "travel.driver_info", "travel.order_state",
                "travel.renter_info", "travel.vehicle_info"};
        MySqlSource<String> mySqlSource = MysqlUtil.getMysqlSource(
                propertiesFile, StartupOptions.initial(), new String[]{"travel"}, tables);

        // 2.3 Read change events from the source as JSON strings.
        DataStreamSource<String> sourceDs =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "ods_source_db");

        // 2.3.1 Reshape each event: drop Debezium metadata keys and lift the
        // originating table name to a top-level field. Events without a
        // "source" object (e.g. heartbeats) map to null and are discarded by
        // the filter below instead of crashing the job with an NPE.
        SingleOutputStreamOperator<String> mapDS = sourceDs.map(new MapFunction<String, String>() {
            @Override
            public String map(String jsonStr) throws Exception {
                JSONObject jsonObj = JSON.parseObject(jsonStr);
                if (jsonObj == null) {
                    return null; // unparseable payload; skip
                }
                JSONObject source = jsonObj.getJSONObject("source");
                if (source == null) {
                    return null; // not a regular change event; skip
                }
                String table = source.getString("table");

                // Strip metadata downstream consumers do not need.
                jsonObj.remove("source");
                jsonObj.remove("ts_ms");
                jsonObj.remove("transaction");

                // Expose the originating table as a top-level key.
                jsonObj.put("table", table);

                return JSON.toJSONString(jsonObj);
            }
        });

        // Drop records the map stage rejected.
        SingleOutputStreamOperator<String> filterDS = mapDS.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String jsonStr) throws Exception {
                return jsonStr != null;
            }
        });

        // 2.4 Configure the Kafka sink.
        KafkaSink<String> kafkaSink = KafkaUtil.getKafkaSink(propertiesFile, topic);

        // NOTE(review): debug print left enabled; remove before production use.
        filterDS.print();

        // Emit the reshaped events to Kafka.
        filterDS.sinkTo(kafkaSink);

        // 2.5 Submit the job under an explicit name for easier identification
        // in the Flink dashboard.
        env.execute("OdsManApp");
    }
}
