package com.atguigu.gmall.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.app.func.MyBroadcastFunction;
import com.atguigu.gmall.app.func.MyPhoenixSink;
import com.atguigu.gmall.bean.TableProcess;
import com.atguigu.gmall.util.KafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * @author yhm
 * @create 2022-09-04 15:08
 */
/**
 * Dimension-layer (DIM) Flink job.
 *
 * <p>Pipeline: Kafka {@code topic_db} → clean/parse to JSON (dirty records to a
 * side output) → connect with a broadcast stream of the MySQL {@code table_process}
 * config table (read via Flink CDC) → route matching dimension rows to Phoenix/HBase.
 */
public class DimApp {
    public static void main(String[] args) throws Exception {
        // TODO 1 Set up the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);


        // TODO 2 Checkpointing / state backend (disabled for local development).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        */

        // TODO 3 Consume the raw change-log stream from Kafka.
        String topicName = "topic_db";
        String groupId = "dim_app_0409";
        DataStreamSource<String> topicDbSource = env.addSource(KafkaUtil.getFlinkKafkaConsumer(topicName, groupId));
//        topicDbSource.print("db>>>>");

        // TODO 4 Drop dirty records and parse the rest into JSONObject.
        // JSONObject vs. a dedicated JavaBean:
        //  - a JavaBean exposes named properties that can be accessed directly;
        //  - a JSONObject needs no upfront declaration and allows ad-hoc add/remove of fields.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty"){};
        SingleOutputStreamOperator<JSONObject> jsonStream = topicDbSource.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject json = JSON.parseObject(value);
                    // Maxwell bootstrap start/complete markers carry no row data;
                    // treat them as dirty and divert them to the side output.
                    String type = json.getString("type");
                    boolean bootstrapMarker =
                            "bootstrap-start".equals(type) || "bootstrap-complete".equals(type);
                    if (bootstrapMarker) {
                        ctx.output(dirtyTag, value);
                    } else {
                        out.collect(json);
                    }
                } catch (Exception e) {
                    // Unparseable payload: log the failure and divert the raw record.
                    e.printStackTrace();
                    ctx.output(dirtyTag, value);
                }
            }
        });

        DataStream<String> dirtyStream = jsonStream.getSideOutput(dirtyTag);
        dirtyStream.print("dirty>>>");

        // TODO 5 Watch the config table in real time with Flink CDC.
        // NOTE(review): credentials are hard-coded here — consider externalizing them.
        MySqlSource<String> configTableSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("gmall_config")
                // Pitfall: tableList must be qualified with the database name.
                .tableList("gmall_config.table_process")
                // Startup mode: snapshot all existing rows first, then stream changes.
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        DataStreamSource<String> configStream = env.fromSource(configTableSource, WatermarkStrategy.noWatermarks(), "flinkCDC");

        // Broadcast state layout:
        //   key   -> source table name, used to decide whether a record is a dimension row
        //   value -> the full config row, carried as a TableProcess bean
        MapStateDescriptor<String, TableProcess> tableProcessDescriptor =
                new MapStateDescriptor<>("table_process", String.class, TableProcess.class);
        BroadcastStream<String> configBroadcast = configStream.broadcast(tableProcessDescriptor);

        // TODO 6 Connect the main stream with the broadcast config stream.
        BroadcastConnectedStream<JSONObject, String> connectedStream = jsonStream.connect(configBroadcast);

        // TODO 7 Process the connected stream: keep only configured dimension rows.
        SingleOutputStreamOperator<JSONObject> dimStream =
                connectedStream.process(new MyBroadcastFunction(tableProcessDescriptor));

        dimStream.print("tableProcess>>>>>>>>");

        // TODO 8 Write dimension rows to HBase via Phoenix.
        dimStream.addSink(new MyPhoenixSink());

        // TODO Launch the job.
        env.execute(groupId);

    }
}
