package com.atguigu.gmall.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
 import com.atguigu.gmall.realtime.app.dim.function.CheckTableFunction;
import com.atguigu.gmall.realtime.app.dim.function.DimSinkFuntion;
import com.atguigu.gmall.realtime.app.dim.function.TableProcessFunction;
import com.atguigu.gmall.realtime.bean.TableProcess;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.util.Collector;

public class DimApp {

    // Pipeline overview:
    // 1  Build the execution environment
    // 2  Main stream: consume topic_db from Kafka (Maxwell must be running)
    // 3  Convert main-stream records to JSONObject
    // 4  Define the CDC source (MySQL config table must exist and binlog must be enabled)
    // 5  Load the CDC source as a data stream (the "config" stream)
    // 6  Broadcast the config stream into the main stream (merge the two streams)
    // 7  Process the merged stream (handle data changes and config changes)
    // 8  Write dimension rows to HBase via Phoenix
    public static void main(String[] args) throws Exception {
        // 1  Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000);
        env.setParallelism(4);

        // 2  Main stream: read topic_db from Kafka (requires Maxwell to be running)
        String sourceTopic = "topic_db";
        String consumerGroup = "dim_app";
        DataStreamSource<String> kafkaStream =
                env.addSource(MyKafkaUtil.getKafkaConsumer(sourceTopic, consumerGroup));

        // 3  Parse each raw JSON string into a JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjStream = kafkaStream.map(JSON::parseObject);

        // Debug output of the parsed main stream
        jsonObjStream.print();

        // 4  CDC source over the MySQL config table
        //    (table gmall_realtime_config.table_process must exist; binlog capture enabled)
        MySqlSource<String> configSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("gmall_realtime_config") // set captured database
                .tableList("gmall_realtime_config.table_process") // set captured table
                .username("root")
                .password("000000")
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .build();

        // 5  Load the CDC source as the config stream
        DataStreamSource<String> configStream =
                env.fromSource(configSource, WatermarkStrategy.noWatermarks(), "cdc_source");

        // configStream runs with parallelism 1 here.
        // CheckTableFunction:
        //   1) creates the Phoenix table if it does not exist
        //   2) adds columns in Phoenix when the config table gains fields
        SingleOutputStreamOperator<String> checkedConfigStream =
                configStream.map(new CheckTableFunction());

        // 6  Broadcast the config stream into the main stream
        // 6.1  Descriptor for the dimension-config broadcast state
        MapStateDescriptor<String, TableProcess> tableProcessStateDesc =
                new MapStateDescriptor<String, TableProcess>(
                        "table_process", String.class, TableProcess.class);

        // 6.2  Turn the config stream into a broadcast stream
        BroadcastStream<String> broadcastConfig =
                checkedConfigStream.broadcast(tableProcessStateDesc);

        // 6.3  Connect the main stream with the broadcast stream
        BroadcastConnectedStream<JSONObject, String> connectedStream =
                jsonObjStream.connect(broadcastConfig);

        // 7  Process the merged stream: route data changes and apply config changes
        SingleOutputStreamOperator<JSONObject> dimRowStream =
                connectedStream.process(new TableProcessFunction(tableProcessStateDesc));

        // Verify: 1) fact-table records filtered out  2) fields trimmed
        //         3) target sink-table identifier attached
        dimRowStream.print("最终要保存到维度的数据：");

        // 8  Persist to Phoenix
        dimRowStream.addSink(new DimSinkFuntion());

        env.execute();
    }
}
