package com.intct.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.hbase.bean.DimConfigBean;
import com.intct.func.DimConnectFunc;
import com.intct.func.HBaseSink;
import com.intct.ods.OdsManApp;
import com.intct.util.KafkaUtil;
import com.intct.util.MysqlUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author gufg
 * @since 2025-08-19 10:56
 * 功能：
 *      2.1 创建环境
 *      2.2 配置数据源--Kafka
 *      2.3 配置数据源--travel_config数据库，dim_table_config表
 *      2.4 从数据源获取数据
 *          2.4.1 json字符串 --> 格式化并移除CDC元数据字段(travel_config数据库 --> flink-cdc)
 *      2.5 双流连接
 *          2.5.0 配置表流 ==> 广播流
 *          2.5.1 判断是否为维度表：
 *              是维度表 --> sink 到 HBase（自定义sink，继承RichSinkFunction）
 *      2.6 启动作业
 */
public class DimManApp {
    /**
     * Entry point of the DIM-layer Flink job.
     *
     * <p>Pipeline: a Kafka topic carries the ODS change stream (main stream); a
     * Flink-CDC MySQL source reads the dimension-table configuration
     * ({@code travel_config.dim_table_config}) and is broadcast to every subtask.
     * The connected stream ({@link DimConnectFunc}) filters dimension-table records,
     * which are then written to HBase via the custom {@link HBaseSink}.
     *
     * @param args optional CLI flags: {@code --filePath <properties file>} and
     *             {@code --kafka-topic-name <topic>} (defaults: bundled
     *             {@code test.properties} and {@code ods-db})
     * @throws Exception if the job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Read CLI arguments; fall back to the bundled test.properties for local runs.
        ParameterTool paraTools = ParameterTool.fromArgs(args);
        String mvnProjectPath = OdsManApp.class.getResource("/test.properties").getFile();

        String filePath = paraTools.get("filePath", mvnProjectPath);
        String topic = paraTools.get("kafka-topic-name", "ods-db");

        // Load connection settings (Kafka / MySQL / HBase) from the properties file.
        ParameterTool propertiesFile = ParameterTool.fromPropertiesFile(filePath);

        // 2.1 Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallelism keeps local debugging output ordered.
        env.setParallelism(1);

        // Checkpointing disabled for now; re-enable for exactly-once in production.
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // 2.2 Kafka source -- the main (fact) stream from the ODS layer.
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(propertiesFile, topic, "dim_group_id");
        DataStreamSource<String> kafkaSourceDS =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dim_kafka_name");

//        kafkaSourceDS.print("kakfa source >>> ");

        // 2.3 MySQL CDC source -- dimension config table travel_config.dim_table_config.
        // StartupOptions.initial(): snapshot the existing rows first, then stream changes.
        MySqlSource<String> mysqlSource = MysqlUtil.getMysqlSource(
                propertiesFile,
                StartupOptions.initial(),
                new String[]{"travel_config"},
                new String[]{"travel_config.dim_table_config"});
        DataStreamSource<String> mysqlSourceDS =
                env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "dim_mysql_name");

        // 2.4.1 Strip CDC envelope metadata; only op/before/after survive downstream.
        SingleOutputStreamOperator<String> mapDS = mysqlSourceDS.map(new MapFunction<String, String>() {
            @Override
            public String map(String jsonStr) throws Exception {
                // Parse the Debezium-style change record.
                JSONObject jsonObj = JSON.parseObject(jsonStr);

                // Remove metadata keys the broadcast state does not need.
                jsonObj.remove("source");
                jsonObj.remove("ts_ms");
                jsonObj.remove("transaction");

                // Serialize the trimmed record back to a JSON string.
                return JSON.toJSONString(jsonObj);
            }
        });

//        mapDS.print("map >>>> ");

        // 2.5 Connect the two streams.
        // 2.5.1 Broadcast the config stream; typed descriptor avoids raw-type unchecked warnings.
        MapStateDescriptor<String, DimConfigBean> mapStateDescriptor =
                new MapStateDescriptor<>("dimMapState", String.class, DimConfigBean.class);
        BroadcastStream<String> broadcastStream = mapDS.broadcast(mapStateDescriptor);

        // 2.5.2 Kafka is the main stream; the flink-cdc config stream is the broadcast side.
        BroadcastConnectedStream<String, String> connectDS = kafkaSourceDS.connect(broadcastStream);

        // 2.5.3 Match incoming records against the broadcast dim config.
        SingleOutputStreamOperator<JSONObject> processStream = connectDS.process(new DimConnectFunc(mapStateDescriptor));

        processStream.print();

        // 2.6 Sink dimension records to HBase via the custom RichSinkFunction.
        processStream.addSink(new HBaseSink("intct83", 1));

        // 2.6 Launch the job with an explicit name for easier identification in the UI.
        env.execute("DimManApp");
    }
}