package com.atguigu.app.dwd.db;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.DwdTableProcessFunction;
import com.atguigu.bean.TableProcess;
import com.atguigu.util.KafkaUtil_wm;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;

// TODO: 2023/3/27 测试开启：kafka  zk   maxwell
/**
 * DWD-layer fact-table splitting job.
 *
 * <p>Pipeline: reads CDC change records from the Kafka topic {@code topic_db},
 * parses them into JSON, connects the stream with a broadcast configuration
 * stream read from the MySQL table {@code edu_config.table_process} via Flink
 * CDC, filters/routes records in {@link DwdTableProcessFunction}, and writes
 * each record's {@code data} payload to the Kafka topic named by its
 * {@code sink_table} field.
 *
 * <p>Prerequisites for a test run: Kafka, ZooKeeper and Maxwell must be up.
 */
public class BaseDbApp {
    public static void main(String[] args) throws Exception {
        // Obtain the Flink execution environment (parallelism 1 for local testing).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism( 1 );

        // Read the raw change-log records from the Kafka topic "topic_db".
        String topic = "topic_db";
        String groupId = "BaseDbApp02";
        DataStreamSource<String> kafkaDS = env.addSource( KafkaUtil_wm.getFlinkKafkaConsumer( topic, groupId ) );

        // Parse each record into a JSONObject; skip dirty (null / malformed) records
        // instead of letting a parse exception fail the whole job.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap( new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    try {
                        JSONObject jsonObject = JSONObject.parseObject( value );
                        if (jsonObject != null) {
                            out.collect( jsonObject );
                        }
                    } catch (Exception e) {
                        // Dirty data: log and drop rather than crash the pipeline.
                        System.err.println( "Discarding malformed record: " + value );
                    }
                }
            }
        } );

        // Read the routing-configuration table via Flink CDC as a stream.
        // NOTE(review): credentials are hard-coded here — move host/user/password
        // into external configuration before deploying.
        MySqlSource<String> mysqlDS = MySqlSource
                .<String>builder()
                .hostname( "47.102.112.46" )
                .port( 3306 )
                .databaseList( "edu_config" )
                .tableList( "edu_config.table_process" )
                .username( "root" )
                .password( "qw123456" )
                // initial(): take a full snapshot first, then tail the binlog.
                .startupOptions( StartupOptions.initial() )
                .deserializer( new JsonDebeziumDeserializationSchema() )
                .build();
        DataStreamSource<String> flinkCDCDS = env.fromSource( mysqlDS, WatermarkStrategy.noWatermarks(), "baseDb-mysql" );

        // Broadcast the configuration stream so every parallel task of the main
        // stream sees the full routing table.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<>( "map-state", String.class, TableProcess.class );
        BroadcastStream<String> broadcastConnection = flinkCDCDS.broadcast( mapStateDescriptor );

        // Connect the data stream with the broadcast configuration stream and
        // filter/route records according to the configuration.
        BroadcastConnectedStream<JSONObject, String> connectDS = jsonObjDS.connect( broadcastConnection );
        SingleOutputStreamOperator<JSONObject> processDS = connectDS.process( new DwdTableProcessFunction( mapStateDescriptor ) );

        // Write each record's "data" payload to the Kafka topic named by its
        // "sink_table" field. Records missing either field are dropped here to
        // avoid an NPE inside the producer.
        processDS.print( "事实表分流 数据即将写出 >>>>>>>>>>>>>>>>" );
        DataStreamSink<JSONObject> resultDS = processDS.addSink( KafkaUtil_wm.getFlinkKafkaProducer_T( new KafkaSerializationSchema<JSONObject>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(JSONObject element, @Nullable Long timestamp) {
                String sinkTable = element.getString( "sink_table" );
                String data = element.getString( "data" );
                if (sinkTable == null || data == null) {
                    // Defensive: element.getString(...) returns null for absent keys;
                    // ProducerRecord would NPE on a null topic.
                    return null;
                }
                return new ProducerRecord<>( sinkTable, data.getBytes() );
            }
        } ) );

        // Launch the job.
        env.execute( "BaseDbApp00" );
    }
}
