package com.atguigu.app.dim;

import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TableProcess;
import com.atguigu.func.DimJdbcFunction;
import com.atguigu.func.DimTableProcessFunction;
import com.atguigu.uitl.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

/**
 * DIM-layer application: consumes business-table change events from Kafka,
 * routes dimension-table rows (as configured in a MySQL config table streamed
 * via Flink CDC + broadcast state) into Phoenix (HBase).
 *
 * @author hjy
 * @create 2023/3/9 15:14
 */

// Data flow: Web/app -> business server (MySQL) -> Maxwell -> Kafka (ODS) -> FlinkApp -> Phoenix (DIM)
// Processes: Mock -> MySQL -> Maxwell -> Kafka (ZK) -> DimApp (MySQL) -> Phoenix (HBase, HDFS, ZK)
public class dimApp {
    public static void main(String[] args) throws Exception {
        // TODO 1: create execution environment; checkpointing config kept below
        // (commented out) for local development — re-enable before production.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall-real-demo/check");
//        env.setStateBackend(new HashMapStateBackend());

        // TODO 2: read the main stream from Kafka.
        String topic = "topic_db";
        String groupId = "gmall0926";
        // A custom utility is used here because the stock SimpleStringSchema
        // deserializer rejects null records, but Kafka may deliver null (tombstone)
        // messages; the utility's anonymous deserializer filters those out.
        DataStreamSource<String> kafkaDs = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // TODO 3: filter out dirty records and convert the main stream to JSONObject.
        SingleOutputStreamOperator<JSONObject> dataStream = kafkaDs.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    try {
                        // parseObject throws a runtime JSONException on malformed
                        // JSON; catch it so one bad record cannot fail the job.
                        JSONObject jsonObject = JSONObject.parseObject(value);
                        // parseObject returns null (without throwing) for empty or
                        // blank input — guard so we never emit a null element.
                        if (jsonObject != null) {
                            out.collect(jsonObject);
                        } else {
                            System.out.println("脏数据" + value);
                        }
                    } catch (JSONException e) {
                        System.out.println("脏数据" + value);
                    }
                }
            }
        });
        dataStream.print("dataStream-------->>>>>>>");

        // TODO 4: read the configuration stream from MySQL via Flink CDC,
        // which tails the binlog of the gmall_config.table_process table.
        // NOTE(review): credentials are hard-coded — move to args/config before production.
        MySqlSource<String> mysql = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("gmall_config")
                .tableList("gmall_config.table_process")
                .deserializer(new JsonDebeziumDeserializationSchema())
                // initial(): take a full snapshot first, then stream binlog changes.
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> configDs = env.fromSource(mysql, WatermarkStrategy.noWatermarks(), "mysql");

        // TODO 5: broadcast the config stream.
        // State descriptor: key = source table name, value = its routing config.
        MapStateDescriptor<String, TableProcess> broadCast = new MapStateDescriptor<>("broadCast", String.class, TableProcess.class);
        BroadcastStream<String> broadcastStream = configDs.broadcast(broadCast);

        // TODO 6: connect the main stream with the broadcast config stream.
        BroadcastConnectedStream<JSONObject, String> connect = dataStream.connect(broadcastStream);

        // TODO 7: process the connected stream — keep only configured dimension rows.
        SingleOutputStreamOperator<JSONObject> hBaseStream = connect.process(new DimTableProcessFunction(broadCast));

        // TODO 8: write the dimension data into Phoenix.
        hBaseStream.print("hBaseStream----------->>>>>>>>>");
        hBaseStream.addSink(new DimJdbcFunction());

        // TODO 9: submit the job.
        env.execute();
    }
}
