package com.atguigu.app.dim;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TableProcess;
import com.atguigu.func.DimSinkFunction;
import com.atguigu.func.DimTableProcessFunction;
import com.atguigu.utils.KafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * ClassName: DimApp
 * Package: com.atguigu.app.dim
 * Description: Flink job that consumes the business change-log stream from
 * Kafka, routes malformed records to a "Dirty" side output, filters
 * dimension-table records against a broadcast configuration stream read from
 * MySQL via Flink CDC, and writes the surviving records to Phoenix (HBase).
 *
 * @Author Lovxy
 * @Create 2023/5/15 14:13
 * @Version 1.0
 */
public class DimApp {
    // Data flow: web/app -> MySQL -> Maxwell -> Kafka(ODS) -> FlinkApp -> Phoenix(DIM)
    // Programs : Mock -> MySQL -> Maxwell -> Kafka(ZK) -> DimApp(FlinkCDC) -> Phoenix(HBase ZK/HDFS)

    /** Kafka topic carrying the raw business change-log records (ODS layer). */
    private static final String ODS_TOPIC = "topic_db";
    /** Kafka consumer group id for this application. */
    private static final String GROUP_ID = "dim_sink_app";

    public static void main(String[] args) throws Exception {
        // TODO 1. Environment setup.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2. Checkpointing / state backend — intentionally disabled for local runs.
        // Re-enable the block below before deploying to a cluster.
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS),Time.of(3L,TimeUnit.MINUTES)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        // Read the main business stream from Kafka (written by Maxwell into the ODS layer).
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(ODS_TOPIC, GROUP_ID);
        DataStreamSource<String> kafkaDS =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");

        // TODO 3. Parse each record into a JSONObject; records that fail to parse
        // are routed to the "Dirty" side output instead of killing the job.
        OutputTag<String> dirtyTag = new OutputTag<String>("Dirty") {
        };
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    // Null payloads (e.g. tombstones) are silently dropped, matching original behavior.
                    if (value != null) {
                        out.collect(JSONObject.parseObject(value));
                    }
                } catch (Exception e) {
                    // Not valid JSON — emit the raw record to the dirty side output.
                    ctx.output(dirtyTag, value);
                }
            }
        });
        jsonObjDS.getSideOutput(dirtyTag).print("Dirty>>>>>>");

        // TODO 4. Read the DIM table-process configuration from MySQL with Flink CDC.
        // NOTE(review): credentials are hard-coded in source — move them to external
        // configuration (args/properties/secrets) before any non-local deployment.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("000000")
                .databaseList("edu_config")
                .tableList("edu_config.table_process")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> mysqlDSSource =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-source");

        // TODO 5. Turn the configuration stream into a broadcast stream.
        MapStateDescriptor<String, TableProcess> stateDescriptor =
                new MapStateDescriptor<>("table-process-state", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDS = mysqlDSSource.broadcast(stateDescriptor);

        // TODO 6. Connect the main stream with the broadcast stream.
        BroadcastConnectedStream<JSONObject, String> connectedStream = jsonObjDS.connect(broadcastDS);

        // TODO 7. Filter the main stream against the broadcast state (dimension tables only).
        SingleOutputStreamOperator<JSONObject> hbaseDS =
                connectedStream.process(new DimTableProcessFunction(stateDescriptor));

        // TODO 8. Sink the dimension records to Phoenix.
        hbaseDS.print("DS>>>");
        hbaseDS.addSink(new DimSinkFunction());

        // TODO 9. Submit the job.
        env.execute("DimSinkApp");
    }
}
