package com.atguigu.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.MyBroadcastFunction;
import com.atguigu.app.func.MyPhoenixSink;
import com.atguigu.bean.TableProcess;
import com.atguigu.util.KafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * DIM-layer Flink job: consumes business-table change events from the Kafka
 * topic {@code topic_db}, drops Maxwell bootstrap/dirty records to a side
 * output, joins the main stream with a broadcast configuration stream read
 * from MySQL via Flink CDC ({@code edu_config.table_process}), and writes the
 * resulting dimension rows to HBase through Phoenix.
 */
public class DimAPP {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        String topicName = "topic_db";
        String groupId = "dim_app_0409";

        // Main stream: raw JSON change records from Kafka.
        DataStreamSource<String> streamSource =
                env.addSource(KafkaUtil.getFlinkKafkaConsumer(topicName, groupId));

        // Side output for records that are not valid JSON or are Maxwell
        // bootstrap markers; keeps bad data from killing the job.
        OutputTag<String> dirty = new OutputTag<String>("dirty") {};
        SingleOutputStreamOperator<JSONObject> processStream =
                streamSource.process(new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out) {
                        // Filter non-JSON data: JSON.parseObject throws an unchecked
                        // JSONException on malformed input and returns null for blank
                        // input — either would otherwise fail the whole job, so route
                        // such records to the dirty side output instead.
                        JSONObject jsonObject;
                        try {
                            jsonObject = JSON.parseObject(value);
                        } catch (Exception e) {
                            ctx.output(dirty, value);
                            return;
                        }
                        if (jsonObject == null) {
                            ctx.output(dirty, value);
                            return;
                        }
                        // Filter Maxwell bootstrap markers (no payload to process).
                        String type = jsonObject.getString("type");
                        if ("bootstrap-start".equals(type) || "bootstrap-complete".equals(type)) {
                            ctx.output(dirty, value);
                        } else {
                            out.collect(jsonObject);
                        }
                    }
                });

        // Config stream: table_process rows via Flink CDC.
        // NOTE(review): credentials are hardcoded — move host/user/password to
        // configuration or a secrets store before production use.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .username("root")
                .password("123456")
                .hostname("101.133.170.75")
                .port(3306)
                .databaseList("edu_config")
                .tableList("edu_config.table_process")
                // CDC startup mode: snapshot all existing rows first, then stream changes.
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();
        DataStreamSource<String> flinkCDC =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "flinkCDC");

        // Broadcast state layout:
        //   key   -> source table name (used to decide whether a record belongs to a dimension table)
        //   value -> full config row as a TableProcess bean
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("table_process", String.class, TableProcess.class);
        BroadcastStream<String> broadcastStream = flinkCDC.broadcast(mapStateDescriptor);

        // Connect the main stream with the broadcast config stream and apply
        // the per-record dimension routing logic.
        BroadcastConnectedStream<JSONObject, String> connect = processStream.connect(broadcastStream);
        SingleOutputStreamOperator<JSONObject> tableProcessStream =
                connect.process(new MyBroadcastFunction(mapStateDescriptor));

        // Sink dimension rows to HBase via Phoenix.
        tableProcessStream.addSink(new MyPhoenixSink());

        env.execute(groupId);
    }
}
