package com.zhang.gmall.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.DebeziumSourceFunction;
import com.zhang.gmall.app.func.DimSinkFunc;
import com.zhang.gmall.app.func.MyDeserializationSchema;
import com.zhang.gmall.app.func.TableProcessFunc;
import com.zhang.gmall.beans.TableProcess;
import com.zhang.gmall.utils.KafkaUtil;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Writes dimension (DIM-layer) data to HBase via Phoenix.
 *
 * @title: dimension data ingestion into HBase
 * @author: zhang
 * @date: 2022/3/23 16:51
 */
public class BaseDBApp {
    public static void main(String[] args) throws Exception {
        //TODO 1. Obtain the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        //TODO 2. Checkpoint settings.
        // Without checkpointing, Kafka offsets and the CDC source state are not
        // fault-tolerant: a restart would lose or replay data. 5s interval with
        // the default EXACTLY_ONCE mode keeps end-to-end consistency.
        env.enableCheckpointing(5000L);

        //TODO 3. Consume the business-data topic from Kafka and filter out dirty records.
        String sourceTopic = "topic_db";
        String groupId = "BaseDBApp_dev";
        // Anonymous subclass so the OutputTag keeps its generic type at runtime.
        OutputTag<String> dirtyTag = new OutputTag<String>("Dirty") {
        };
        SingleOutputStreamOperator<JSONObject> kafkaDS = env
                .addSource(KafkaUtil.getKafkaSource(sourceTopic, groupId))
                .process(new ProcessFunction<String, JSONObject>() {
                    /**
                     * Parses each raw record as JSON; records that fail to parse
                     * are routed to the dirty side output instead of being dropped.
                     */
                    @Override
                    public void processElement(String value, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                        try {
                            JSONObject jsonObject = JSON.parseObject(value);
                            out.collect(jsonObject);
                        } catch (Exception e) {
                            // Malformed payload: divert to the side output for inspection.
                            ctx.output(dirtyTag, value);
                        }
                    }
                });
        // Surface dirty records instead of silently discarding them.
        kafkaDS.getSideOutput(dirtyTag).print("Dirty");
        kafkaDS.print();

        //TODO 4. Read the MySQL config table with Flink CDC to create the configuration stream.
        // NOTE(review): credentials are hardcoded — move host/user/password to
        // external configuration (args or a properties file) before production use.
        DebeziumSourceFunction<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop103")
                .port(3306)
                .username("root")
                .password("000000")
                .databaseList("gmall_realtime_2022")
                .tableList("gmall_realtime_2022.table_process_dev")
                // initial(): snapshot the existing table content first, then stream binlog changes.
                .startupOptions(StartupOptions.initial())
                .deserializer(new MyDeserializationSchema())
                .build();

        DataStreamSource<String> mysqlDS = env.addSource(mySqlSource);
        mysqlDS.print("mysql");

        //TODO 5. Declare the broadcast-state descriptor (source table name -> processing config).
        MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<>("dim_info", Types.STRING, Types.POJO(TableProcess.class));

        //TODO 6. Broadcast the configuration stream.
        BroadcastStream<String> broadcastDS = mysqlDS.broadcast(mapStateDescriptor);

        //TODO 7. Connect the broadcast stream with the data stream;
        // TableProcessFunc filters/routes dimension records per the broadcast config.
        SingleOutputStreamOperator<JSONObject> dimDS = kafkaDS
                .connect(broadcastDS)
                .process(new TableProcessFunc(mapStateDescriptor));

        //TODO 8. Write dimension data to Phoenix tables.
        dimDS.addSink(new DimSinkFunc());

        //TODO 9. Submit the job with an identifiable name.
        env.execute("BaseDBApp");
    }
}
