package com.atguigu.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.DimSinkFunction;
import com.atguigu.app.func.DimTableProcessFunction;
import com.atguigu.bean.TableProcess;
import com.atguigu.util.KafkaUtil_wm;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Dimension-layer Flink job for the "edu" pipeline.
 *
 * <p>Pipeline: Kafka {@code topic_db} (business-table changelog) -> JSON parse/filter
 * -> connect with a broadcast stream of the MySQL-CDC config table
 * ({@code edu_config.table_process}) -> {@link DimTableProcessFunction} filters/routes
 * dimension rows -> {@link DimSinkFunction} writes them to Phoenix.
 *
 * <p>Optional CLI args (all default to the previous hard-coded values, so existing
 * invocations keep working): {@code args[0]} = MySQL host, {@code args[1]} = user,
 * {@code args[2]} = password.
 */
public class DimApp_Edu {
    public static void main(String[] args) throws Exception {
        // todo 01 Obtain the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for development; production should match Kafka partition count.
        env.setParallelism( 1 );
//        // todo 2.1 Enable checkpointing every 5 s with exactly-once semantics
//        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE); // todo production usually uses 5-8 minutes
//        // todo 2.2 Set the checkpoint timeout to 1 minute
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        // todo 2.3 Set the minimum pause between two checkpoints
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        // todo 2.4 Retain the last checkpoint when the job is cancelled
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        // todo 2.5 Configure the automatic restart strategy from checkpoints
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(1L), Time.minutes(1L)));
//        // todo 2.6 Configure the state backend
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/flinkCDC/");
//        // todo 2.7 Set the HDFS access user
//        System.setProperty("HADOOP_USER_NAME", "atguigu");
        // TODO: 2023/3/24 Read the Kafka topic "topic_db" and wrap it as a stream.
        String topic = "topic_db";
        String groupId = "DimApp_Edu";
        DataStreamSource<String> kafkaDS = env.addSource( KafkaUtil_wm.getFlinkKafkaConsumer( topic, groupId ) );

        // todo 03 Filter out malformed records and convert the rest to JSONObject.
        //  Main stream: jsonObjDS
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap( new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    try {
                        JSONObject jsonObject = JSON.parseObject( value );
                        out.collect( jsonObject );
                    } catch (JSONException e) {
                        // Malformed records are deliberately dropped (best-effort parse);
                        // route both the diagnostic and the stack trace to stderr so
                        // they land in the same log stream.
                        System.err.println( "非json格式数据" + value );
                        e.printStackTrace();
                    }
                }
            }
        } );

        // todo 04 Read the config table via Flink CDC (sqlSource).
        // NOTE(security): connection details were hard-coded; they now default to the
        // old values but can be overridden via CLI args so credentials need not live
        // in source control.
        String mysqlHost = args.length > 0 ? args[0] : "47.102.112.46";
        String mysqlUser = args.length > 1 ? args[1] : "root";
        String mysqlPassword = args.length > 2 ? args[2] : "qw123456";
        MySqlSource<String> sqlSource = MySqlSource
                .<String>builder()
                .hostname( mysqlHost )
                .port( 3306 )
                .username( mysqlUser )
                .password( mysqlPassword )
                .databaseList( "edu_config" )
                .tableList( "edu_config.table_process" )// the database has only this table, so this is optional
                .startupOptions( StartupOptions.initial() )// snapshot existing rows, then stream the binlog
                .deserializer( new JsonDebeziumDeserializationSchema() )
                .build();
        // Config stream: mysqlDS
        DataStreamSource<String> mysqlDS = env.fromSource( sqlSource, WatermarkStrategy.noWatermarks(), "MysqlSource" );

        mysqlDS.print( "mysqlDS>>>>>>>>>>>" );

        // todo 05 Convert the config stream into a broadcast stream.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<>( "map-state", String.class, TableProcess.class );
        // Broadcast stream: broadcastDS
        BroadcastStream<String> broadcastDS = mysqlDS.broadcast( mapStateDescriptor );

        // todo 06 Connect the main stream with the broadcast stream.
        BroadcastConnectedStream<JSONObject, String> connectedStream = jsonObjDS.connect( broadcastDS );

        // todo 07 Filter the main stream according to the broadcast config state.
        SingleOutputStreamOperator<JSONObject> resultDS = connectedStream.process( new DimTableProcessFunction( mapStateDescriptor ) );

        // todo 08 Write the dimension rows to Phoenix via DimSinkFunction.
        resultDS.print( "数据即将写入Phoenix >>>>>>>>>" );
        resultDS.addSink( new DimSinkFunction() );

        // todo 09 Submit the job. (Job name kept as "DimApp" for compatibility with
        // any existing savepoints/monitoring, though it differs from the class name.)
        env.execute( "DimApp" );
    }
}
