package com.intct.dim;

import com.alibaba.fastjson.JSONObject;
import com.intct.hbase.bean.DimConfigBean;
import com.intct.common.Constant;
import com.intct.func.DimBroadcastFunc;
import com.intct.func.HBasekSinkFunc;
import com.intct.util.KafkaUtil;
import com.intct.util.MysqlUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author gufg
 * @since 2025-07-03 09:51
 */
/**
 * Flink job entry point that maintains dimension (DIM) tables in HBase.
 *
 * <p>Pipeline: a MySQL-CDC source streams the DIM table configuration
 * ({@code travel_config.dim_table_config}) as a broadcast stream; the main Kafka
 * ODS stream is connected to it, {@link DimBroadcastFunc} routes matching records
 * according to the broadcast config, and results are written to HBase via
 * {@link HBasekSinkFunc}.
 */
public class DimApplication {
    public static void main(String[] args) throws Exception {
        // Bind the Flink REST/Web UI to a non-default port to avoid clashes
        // with other local jobs.
        Configuration conf = new Configuration();
        conf.set(RestOptions.BIND_PORT, "8082");

        // Create the streaming environment; parallelism 1 keeps the broadcast
        // config ordering simple for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // Source 1 -- Flink CDC reads the DIM config table from MySQL.
        // StartupOptions.initial() snapshots existing rows before tailing the binlog.
        MySqlSource<String> mysqlSource = MysqlUtil.getMysqlSource(StartupOptions.initial(), "travel_config", "travel_config.dim_table_config");
        DataStreamSource<String> mysqlSourceDS = env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "dim_mysql_name");

        // Source 2 -- Kafka ODS topic carrying the business change records.
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(Constant.KAFKA_ODS_TOPIC_NAME, "dim_gid");
        DataStreamSource<String> kafkaSourceDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dim_application-1");

        // NOTE(review): debug-only stdout sink; consider removing or guarding
        // before production deployment.
        kafkaSourceDS.print("kafkasourceDS>>>>>>");

        // Sample CDC record emitted by the MySQL source (Debezium format):
        // {"before":null,"after":{"source_table":"driver_info","sink_table":"dim_driver_info","sink_family":"info","sink_columns":"id,mobile,driver_name,create_time,register_city,driver_type,cancel_count,driver_management_id","sink_row_key":"id"},
        // "source":{"version":"1.9.7.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":0,"snapshot":"false","db":"travel_config","sequence":null,"table":"dim_table_config","server_id":0,"gtid":null,"file":"","pos":0,"row":0,"thread":null,"query":null},
        // "op":"r","ts_ms":1751508968201,"transaction":null}

        // Broadcast state descriptor: maps source-table name -> its DIM config.
        // Fully parameterized (was a raw type) so DimBroadcastFunc gets a
        // type-safe descriptor without unchecked warnings.
        MapStateDescriptor<String, DimConfigBean> mapStateDescriptor =
                new MapStateDescriptor<>("dimMysql", String.class, DimConfigBean.class);
        BroadcastStream<String> broadcastStream = mysqlSourceDS.broadcast(mapStateDescriptor);

        // Connect the Kafka stream with the broadcast config stream
        // (broadcast-state join) and let DimBroadcastFunc filter/enrich records.
        BroadcastConnectedStream<String, String> connectStream = kafkaSourceDS.connect(broadcastStream);
        SingleOutputStreamOperator<JSONObject> processDS = connectStream.process(new DimBroadcastFunc(mapStateDescriptor));

        // Sink the processed dimension records to HBase.
        processDS.addSink(new HBasekSinkFunc());

        // Launch the job.
        env.execute("dimApplication");
    }
}
