package com.atguigu.edu.realtime.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.BaseDbTableProcessFunction;
import com.atguigu.edu.realtime.bean.BaseDbTableProcess;
import com.atguigu.edu.realtime.util.KafkaUtils;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.calcite.shaded.org.checkerframework.checker.nullness.qual.Nullable;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

public class BaseDbApp {

    /**
     * Entry point of the DWD base-DB routing job.
     *
     * <p>Pipeline: consume the raw change-log stream from the {@code topic_db} Kafka topic,
     * drop malformed and {@code bootstrap-*} snapshot records, broadcast the dynamic routing
     * configuration captured from MySQL ({@code edu_config.table_process_dwd}) via Flink CDC,
     * and write each matching record to the Kafka topic named by its {@code sink_table} field.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // 1. Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 2. Checkpointing: 5s barrier interval, generous timeout for slow sinks.
        env.enableCheckpointing(5000L);
        env.getCheckpointConfig().setCheckpointTimeout(500000L);
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500L);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(5)));
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/edu/ck");
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        // 3. Consume the raw change-log stream from the topic_db topic.
        String topic = "topic_db";
        String groupId = "BaseDbApp";
        KafkaSource<String> kafkaSource = KafkaUtils.getKafkaSource(topic, groupId);
        DataStreamSource<String> kafkaSourceDs =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");

        // 4. Parse and filter: skip malformed JSON and "bootstrap-*" snapshot records.
        SingleOutputStreamOperator<JSONObject> typeDs = kafkaSourceDs.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String str, ProcessFunction<String, JSONObject>.Context context, Collector<JSONObject> collector) {
                try {
                    JSONObject jsonObj = JSON.parseObject(str);
                    String type = jsonObj.getString("type");
                    // Null-guard: a record without a "type" field previously threw an NPE
                    // and was silently dropped by the catch block below.
                    if (type != null && !type.startsWith("bootstrap-")) {
                        collector.collect(jsonObj);
                    }
                } catch (Exception e) {
                    // Best-effort: malformed records are skipped rather than failing the job.
                    System.err.println("BaseDbApp: skipping malformed record: " + e);
                }
            }
        });
        //typeDs.print(">>>>>>");

        // 5. Capture the routing configuration table with Flink CDC
        //    (initial full snapshot, then ongoing binlog changes).
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("edu_config") // captured database
                .tableList("edu_config.table_process_dwd") // captured table
                .username("root")
                .password("000000")
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> mysqlDs = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysqlDs");
//        mysqlDs.print("flinkCDC:");

        // 6. Broadcast the configuration stream so every parallel task sees the routing rules.
        MapStateDescriptor<String, BaseDbTableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("mapStateDescriptor", String.class, BaseDbTableProcess.class);
        BroadcastStream<String> broadcastDs = mysqlDs.broadcast(mapStateDescriptor);

        // 7. Connect the data stream with the broadcast configuration stream.
        BroadcastConnectedStream<JSONObject, String> connectDs = typeDs.connect(broadcastDs);

        // 8. Route/filter records according to the broadcast configuration.
        SingleOutputStreamOperator<JSONObject> processDs = connectDs.process(new BaseDbTableProcessFunction(mapStateDescriptor));
        //processDs.print(">>>>>>>");

        // 9. Sink each record to the Kafka topic named by its "sink_table" field.
        //    Fact tables: cart-add, test paper, test question, favor, chapter review, course review.
        processDs.sinkTo(
                KafkaUtils.getKafkaSinkBySchema(new KafkaRecordSerializationSchema<JSONObject>() {
                    @Nullable
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(JSONObject jsonObj, KafkaSinkContext context, Long timestamp) {
                        // Assumes BaseDbTableProcessFunction sets "sink_table" on every record
                        // it emits — TODO confirm; a null topic would fail at the producer.
                        String sinkTopic = jsonObj.getString("sink_table");
                        jsonObj.remove("sink_table");
                        // Encode explicitly as UTF-8 instead of the platform-default charset.
                        return new ProducerRecord<>(sinkTopic, jsonObj.toJSONString().getBytes(StandardCharsets.UTF_8));
                    }
                })
        );

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
