package com.atguigu.gmall.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.dim.func.DimSinkFunction;
import com.atguigu.gmall.realtime.app.dim.func.TableProcessFunction;
import com.atguigu.gmall.realtime.bean.TableProcess;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

/**
 * Purpose: read dimension-table change events and write them to HBase.
 */
/**
 * Dimension-loading job.
 *
 * <p>Topology:
 * <ol>
 *   <li>Consume Maxwell-style change records from the Kafka topic {@code topic_db}
 *       and keep only rows from the {@code gmall} database.</li>
 *   <li>Capture the dimension configuration table ({@code table_process}) from MySQL
 *       via Flink CDC (initial snapshot + binlog) and broadcast it.</li>
 *   <li>Connect the broadcast config stream to the main stream; {@link TableProcessFunction}
 *       filters/shapes dimension rows according to the broadcast config.</li>
 *   <li>Sink the resulting dimension rows with {@link DimSinkFunction} (writes to HBase).</li>
 * </ol>
 */
public class DimApp {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 1. Main stream: business change records from Kafka topic "topic_db".
        String topic = "topic_db";
        String groupId = "dim_app_group";
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);

        DataStreamSource<String> kafkaDstream = env.addSource(kafkaConsumer);
        SingleOutputStreamOperator<JSONObject> jsonObjDstream =
                kafkaDstream.map(jsonstring -> JSON.parseObject(jsonstring));
        // Constant-first equals: a record without a "database" field must be dropped,
        // not crash the job with a NullPointerException.
        SingleOutputStreamOperator<JSONObject> gmallJsonObjDstream =
                jsonObjDstream.filter(jsonObj -> "gmall".equals(jsonObj.getString("database")));

        // Sample record:
        // {"database":"gmall","table":"user_info","type":"update","ts":1665153019,"xid":3454,"commit":true,"data":{"id":2,"login_name":"834sko4sh","nick_name":"香香77777","passwd":null,"name":"何香","phone_num":"13843974462","email":"834sko4sh@sina.com","head_img":null,"user_level":"2","birthday":"2005-02-14","gender":"F","create_time":"2020-06-14 17:36:43","operate_time":null,"status":null},"old":{"nick_name":"香香45666"}}
        // kafkaDstream.print();

        // 2. Config stream: dimension routing table captured from MySQL via CDC.
        // NOTE(review): credentials are hard-coded; move host/user/password to
        //   external configuration before deploying outside a dev environment.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("gmall_realtime_config") // set captured database
                .tableList("gmall_realtime_config.table_process") // set captured table
                .username("root")
                .password("000000")
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                // initial() = snapshot existing rows via JDBC, then follow the binlog.
                .startupOptions(StartupOptions.initial())
                .build();

        // Parallelism 1: the config table is tiny and ordering of config changes matters.
        DataStreamSource<String> tableProcessDataStream =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "table_process").setParallelism(1);

        // tableProcessDataStream.print("cdc record: ");

        // 3. Broadcast processing.
        // 3.1 State descriptor: table name -> its TableProcess config row.
        MapStateDescriptor<String, TableProcess> tableProcessMapStateDescriptor =
                new MapStateDescriptor<String, TableProcess>("table_process_state", String.class, TableProcess.class);
        // 3.2 Turn the config stream into a broadcast stream.
        BroadcastStream<String> tableProcessBroadCastDStream =
                tableProcessDataStream.broadcast(tableProcessMapStateDescriptor);
        // 3.3 Connect the broadcast (config) stream to the main (data) stream.
        BroadcastConnectedStream<JSONObject, String> broadcastConnectedStream =
                gmallJsonObjDstream.connect(tableProcessBroadCastDStream);
        // Generics: <main-stream type, broadcast-stream type, output type>.
        SingleOutputStreamOperator<JSONObject> dimJsonObjDStream =
                broadcastConnectedStream.process(new TableProcessFunction(tableProcessMapStateDescriptor));

        dimJsonObjDStream.print("维度数据：");

        // 4. Sink dimension rows to HBase.
        dimJsonObjDStream.addSink(new DimSinkFunction());

        // Named job for easier identification in the Flink UI.
        env.execute("DimApp");
    }
}
