package com.twoandone.tms.realtime.app.dim;

import com.alibaba.fastjson.JSONObject;
import com.twoandone.tms.realtime.app.func.DimBroadcastProcessFunction;
import com.twoandone.tms.realtime.app.func.DimPhoenixSink;
import com.twoandone.tms.realtime.beans.TableProcess;
import com.twoandone.tms.realtime.utils.FlinkCdcUtils;
import com.twoandone.tms.realtime.utils.KafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;


/**
 * Dimension-layer (DIM) streaming job.
 *
 * <p>Reads change-log records from the Kafka topic {@code topic_db}, drops
 * bootstrap control records, connects the stream with broadcast dimension-table
 * configuration captured from MySQL via Flink CDC, and writes matching
 * dimension rows to HBase through Phoenix.
 *
 * @date 2023/3/9 11:11
 **/
public class DimApp {

    /**
     * Assembles and launches the DIM streaming pipeline.
     *
     * <p>Stages: Kafka source → JSON parse/filter → connect with broadcast CDC
     * config → dimension processing → Phoenix sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 1. Read raw change-log records from Kafka.
        String topicName = "topic_db";
        String groupId = "dim_app";
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(topicName, groupId);
        DataStreamSource<String> kafkaDs =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");

        // 2. Parse each record as JSON and drop bootstrap control records
        //    ("bootstrap-start"/"bootstrap-complete" carry no row payload).
        SingleOutputStreamOperator<JSONObject> businessDs = kafkaDs.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out)
                            throws Exception {
                        JSONObject jsonObj;
                        try {
                            jsonObj = JSONObject.parseObject(value);
                        } catch (Exception e) {
                            // FIX: a single malformed message must not crash the whole
                            // job — skip it instead of letting the parse exception
                            // propagate and fail the task.
                            return;
                        }
                        String type = jsonObj.getString("type");
                        if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                            out.collect(jsonObj);
                        }
                    }
                });

        // 3. Capture dimension-table configuration from MySQL via Flink CDC
        //    and broadcast it to all parallel instances of the join operator.
        MySqlSource<String> configSource = FlinkCdcUtils.getCDC();
        DataStreamSource<String> configDs =
                env.fromSource(configSource, WatermarkStrategy.noWatermarks(), "mysql_source");

        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("mapStateDescriptor", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDs = configDs.broadcast(mapStateDescriptor);

        // 4. Connect the business stream with the broadcast configuration and
        //    keep only the rows that belong to configured dimension tables.
        BroadcastConnectedStream<JSONObject, String> connectedDs = businessDs.connect(broadcastDs);
        SingleOutputStreamOperator<JSONObject> dimDs =
                connectedDs.process(new DimBroadcastProcessFunction(mapStateDescriptor));

        // 5. Write dimension rows to HBase through Phoenix.
        dimDs.addSink(new DimPhoenixSink());

        // FIX: name the job explicitly so it is identifiable in the Flink web UI.
        env.execute("dim_app");
    }
}
