package com.intct.ods;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.common.Constant;
import com.intct.utils.KafkaUtil;
import com.intct.utils.MysqlUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import java.io.InputStream;
import java.util.Objects;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author gufg
 * @since 2025-09-17 10:02
 * Extracts change data from the business MySQL database (CDC) and forwards it to Kafka (ODS layer).
 * Example: flink run -p 4 -c com.intct.ods.OdsDatabaseAPI &lt;job-jar&gt; --ods_mysql_host cdh-node --ods_mysql_port 13306
 */
/**
 * ODS-layer ingestion job: reads MySQL change events via Flink CDC, flattens
 * the Debezium {@code source} metadata into the top-level JSON, strips fields
 * not needed downstream, and sinks the cleaned records to Kafka.
 */
public class OdsDatabaseAPI {

    /** Classpath location of the job configuration (MySQL/Kafka settings). */
    private static final String CONFIG_RESOURCE = "/intct.properties";

    /**
     * Entry point. Builds two CDC sources — a full snapshot source
     * ({@code StartupOptions.initial()}) and an incremental binlog source
     * ({@code StartupOptions.latest()}) — and routes both through the same
     * cleanup/sink pipeline.
     *
     * @param args unused; configuration comes from {@value #CONFIG_RESOURCE} on the classpath
     * @throws Exception if the configuration resource is missing/unreadable or the job fails
     */
    public static void main(String[] args) throws Exception {
        // Load configuration via getResourceAsStream rather than
        // getResource(...).getFile(): the latter returns an unusable
        // "jar:..." path when the job runs from a packaged jar, and NPEs
        // without context when the resource is absent.
        InputStream config = OdsDatabaseAPI.class.getResourceAsStream(CONFIG_RESOURCE);
        if (config == null) {
            throw new IllegalStateException("Missing classpath resource: " + CONFIG_RESOURCE);
        }
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(config);

        // Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Exactly-once checkpoints every 6 seconds (required for CDC state recovery).
        env.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);

        // Full-snapshot source: reads existing rows, then continues from the binlog.
        String allTableList = parameterTool.get("mysql.all.table", "mysql.all.table");
        MySqlSource<String> mySqlSourceAll = MysqlUtil.getMysqlSource(parameterTool, allTableList, StartupOptions.initial());
        toKafka(env, mySqlSourceAll, parameterTool);

        // Incremental source: only new changes from the latest binlog offset.
        String incrTableList = parameterTool.get("mysql.incr.table", "mysql.incr.table");
        MySqlSource<String> mySqlSource = MysqlUtil.getMysqlSource(parameterTool, incrTableList, StartupOptions.latest());
        toKafka(env, mySqlSource, parameterTool);

        // Launch the job.
        env.execute("mysql-cdc-1");
    }

    /**
     * Wires one CDC source into the pipeline: drops null records, hoists the
     * db/table/file/pos fields out of the Debezium {@code source} object onto
     * the top level, removes bulky metadata, and sinks the result to Kafka.
     *
     * @param env           shared streaming environment
     * @param mySqlSource   CDC source producing change events as JSON strings
     * @param parameterTool configuration used to build the Kafka sink
     */
    private static void toKafka(StreamExecutionEnvironment env, MySqlSource<String> mySqlSource, ParameterTool parameterTool) {
        // Read change events as JSON strings.
        DataStreamSource<String> sourceDS =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql_source_1");

        // Defensively discard null records.
        SingleOutputStreamOperator<String> filterDS = sourceDS.filter(Objects::nonNull);

        // Normalize each event: flatten metadata, drop unneeded keys.
        SingleOutputStreamOperator<String> mapDS = filterDS.map(new MapFunction<String, String>() {
            @Override
            public String map(String jsonStr) throws Exception {
                // Parse the JSON string into a mutable object.
                JSONObject jsonObj = JSON.parseObject(jsonStr);

                // The Debezium "source" object carries binlog position and origin info.
                JSONObject sourceObj = jsonObj.getJSONObject(Constant.FLINK_CDC_MYSQL_JSON_SOURCE_KEY);

                // Guard against records lacking a "source" object: the previous
                // unconditional dereference would NPE and fail the whole job on
                // a single malformed event.
                if (sourceObj != null) {
                    // Hoist db / table / file / pos to the top level for downstream use.
                    jsonObj.put(Constant.FLINK_CDC_MYSQL_JSON_FILE_KEY, sourceObj.getString(Constant.FLINK_CDC_MYSQL_JSON_FILE_KEY));
                    jsonObj.put(Constant.FLINK_CDC_MYSQL_JSON_POS_KEY, sourceObj.getString(Constant.FLINK_CDC_MYSQL_JSON_POS_KEY));
                    jsonObj.put(Constant.FLINK_CDC_MYSQL_JSON_DB_KEY, sourceObj.getString(Constant.FLINK_CDC_MYSQL_JSON_DB_KEY));
                    jsonObj.put(Constant.FLINK_CDC_MYSQL_JSON_TABLE_KEY, sourceObj.getString(Constant.FLINK_CDC_MYSQL_JSON_TABLE_KEY));
                }

                // Strip metadata that downstream consumers do not need.
                jsonObj.remove(Constant.FLINK_CDC_MYSQL_JSON_SOURCE_KEY);
                jsonObj.remove(Constant.FLINK_CDC_MYSQL_JSON_TS_MS_KEY);
                jsonObj.remove(Constant.FLINK_CDC_MYSQL_JSON_TTRASACTION_KEY);

                return jsonObj.toJSONString();
            }
        });

        // Debug output of cleaned records to the task stdout.
        mapDS.print("mapDS --> ");

        // Configure the Kafka sink and attach it.
        KafkaSink<String> kafkaSink = KafkaUtil.getKafaSink(parameterTool, "ods-db", "mysql-cdc-");
        mapDS.sinkTo(kafkaSink);
    }
}