package com.intct.ods;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.common.Constant;
import com.intct.util.FromPropertiesFileUtil;
import com.intct.util.KafkaUtil;
import com.intct.util.MySqlUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.producer.ProducerConfig;

/**
 * ODS-layer ingestion job: synchronizes operational data from the business
 * MySQL database into Kafka via Flink CDC — business/fact tables, dimension
 * tables, code (lookup) tables, etc.
 *
 * Submit with: flink run --class com.intct.ods.OdsApp &lt;job-jar&gt;
 *
 * @author gufg
 * @since 2025-07-25 14:06
 */
public class OdsApp {

    /**
     * Job entry point: builds and runs the CDC-to-Kafka pipeline.
     *
     * Topology: MySQL CDC source -> flatten change record (map) -> drop
     * unprocessable records (filter) -> Kafka sink (ODS DB topic).
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism pinned to 1 for deterministic local runs; raise for production throughput.
        env.setParallelism(1);

        // Enable exactly-once checkpointing every 5 seconds (required for the
        // exactly-once Kafka delivery guarantee).
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // MySQL CDC source: StartupOptions.initial() snapshots existing rows first,
        // then streams the binlog.
        MySqlSource<String> mySqlSource = MySqlUtil.getMysqlSource(StartupOptions.initial());

        // NOTE(review): the operator name keeps the original "sourace" typo on purpose;
        // renaming an operator can break state mapping when restoring from an
        // existing checkpoint/savepoint.
        DataStreamSource<String> sourceDS =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "ods_app_sourace");
        sourceDS.print("sourceDS>>>>>>>>>>"); // debug output; remove before production

        // Flatten the Debezium change record: hoist the database and table names out
        // of the "source" sub-object to the top level, and drop the "source" and
        // "transaction" keys. Returns null for records that cannot be processed
        // (malformed JSON, or no "source" key — e.g. heartbeat/schema messages) so
        // the downstream filter discards them instead of an exception killing the job.
        SingleOutputStreamOperator<String> mapDS = sourceDS.map((MapFunction<String, String>) jsonStr -> {
            try {
                JSONObject jsonObj = JSON.parseObject(jsonStr);
                JSONObject source = jsonObj.getJSONObject(Constant.MYSQL_CDC_JSON_KEY_SOURCE);
                if (source == null) {
                    // Not a data-change record — nothing to flatten.
                    return null;
                }

                String dbName = source.getString(Constant.MYSQL_CDC_JSON_KEY_SOURCE_DB);
                String table = source.getString(Constant.MYSQL_CDC_JSON_KEY_SOURCE_TABLE);

                jsonObj.remove(Constant.MYSQL_CDC_JSON_KEY_SOURCE);
                jsonObj.remove(Constant.MYSQL_CDC_JSON_KEY_TRANSTACTION);

                jsonObj.put(Constant.MYSQL_CDC_JSON_KEY_SOURCE_DB, dbName);
                jsonObj.put(Constant.MYSQL_CDC_JSON_KEY_SOURCE_TABLE, table);

                return JSON.toJSONString(jsonObj);
            } catch (Exception e) {
                // Best-effort pipeline: drop the malformed record downstream rather
                // than failing the whole job over one bad payload.
                return null;
            }
        });

        mapDS.print("mapDS>>>>>>>>>>"); // debug output; remove before production

        // Drop the null markers produced by the map for unprocessable records.
        SingleOutputStreamOperator<String> filterDS =
                mapDS.filter((FilterFunction<String>) value -> value != null);

        filterDS.print("filterDS>>>>>>>>>>"); // debug output; remove before production

        // Kafka sink for the ODS DB topic (transactional id prefix "ods-db-prefix-id").
        KafkaSink<String> kafkaSink = KafkaUtil.getKafkaSink(Constant.KAFKA_ODS_DB_TOPIC, "ods-db-prefix-id");

        // Write the flattened change records to Kafka.
        filterDS.sinkTo(kafkaSink);

        // Launch the job.
        env.execute("OdsApp");
    }
}
