package com.atguigu.app.dim;

import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.Func.DimJdbcSink;
import com.atguigu.Func.DimTableProcessFunctiom;
import com.atguigu.Util.MyKafkaUtil;

import com.atguigu.bean.TableProcess;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;


/**
 * @author hjy
 * @create 2023/3/8 11:35
 */
/*
Data flow: app --> mysql --> maxwell --> kafka(ods topic) --> flinkApp --> phoenix(dim layer)
Processes:  mock --> mysql --> maxwell --> kafka(zk) --> flinkApp --> phoenix(hdfs, zk, hbase)
 */
public class dimApp {

    /**
     * Entry point of the DIM-layer Flink job.
     *
     * <p>Reads business-table change records from the Kafka topic {@code topic_db},
     * filters out malformed JSON, joins the stream with a broadcast configuration
     * stream produced by MySQL CDC on {@code gmall_config.table_process}, and writes
     * the resulting dimension rows to Phoenix/HBase via {@link DimJdbcSink}.
     *
     * @param args unused command-line arguments
     * @throws Exception propagated from {@code env.execute()} on job failure
     */
    public static void main(String[] args) throws Exception {
        //todo 1 Execution environment & checkpointing
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpoint every 5 seconds (disabled for local development)
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        // Checkpoint storage path
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall-flink/check");
//        // Abort a checkpoint attempt after 60 seconds
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        // State backend (important!)
//        env.setStateBackend(new HashMapStateBackend());
        // HDFS user for checkpoint storage
        //System.setProperty("HADOOP_USER_NAME", "atguigu");

        //todo 2 Connect to Kafka: topic_db is the main (business data) stream
        String topic = "topic_db";
        String groupId = "dimApp";
        DataStreamSource<String> kafkaDs = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // todo 3 Main stream: filter dirty records and convert each value to a JSON object
        SingleOutputStreamOperator<JSONObject> jsonObjDs = kafkaDs.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    try {
                        JSONObject jsonObject = JSONObject.parseObject(value);
                        // parseObject returns null for blank input instead of throwing;
                        // guard so we never emit a null element downstream.
                        if (jsonObject != null) {
                            out.collect(jsonObject);
                        } else {
                            System.out.println("脏数据" + value);
                        }
                    } catch (JSONException e) {
                        System.out.println("脏数据" + value);
                    }
                }
            }
        });

        //todo 4 Configuration stream via MySQL CDC
        // NOTE: binlog must be enabled on the MySQL server for CDC to work
        MySqlSource<String> mysql = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("gmall_config")
                .tableList("gmall_config.table_process")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();
        DataStreamSource<String> confDs = env.fromSource(mysql, WatermarkStrategy.noWatermarks(), "mysql");

        // todo 5 Turn the configuration stream into a broadcast stream
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("broad-cast", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDs = confDs.broadcast(mapStateDescriptor);

        // todo 6 Connect the main stream with the broadcast stream
        BroadcastConnectedStream<JSONObject, String> connect = jsonObjDs.connect(broadcastDs);

        //todo 7 Process both streams: route dimension-table records per broadcast config
        SingleOutputStreamOperator<JSONObject> hBaseStream =
                connect.process(new DimTableProcessFunctiom(mapStateDescriptor));

        //todo 8 Sink dimension rows to Phoenix/HBase
        hBaseStream.print("hBaseStream------>>");
        hBaseStream.addSink(new DimJdbcSink());

        //todo 9 Launch the job (named for easier identification on the Flink dashboard)
        env.execute("dimApp");
    }
}
