package com.root.edu.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.root.edu.realtime.app.func.DimSinkFunction;
import com.root.edu.realtime.app.func.TableProcessFunction;
import com.root.edu.realtime.bean.TableProcess;
import com.root.edu.realtime.util.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;


/**
 * Dimension-layer Flink job.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Read business-data change records (Maxwell format) from the Kafka topic {@code topic_db}.</li>
 *   <li>Parse each record to JSON and drop malformed / bootstrap-marker records.</li>
 *   <li>Read the dimension-table config from MySQL via Flink CDC and broadcast it.</li>
 *   <li>Connect the main stream with the broadcast config and route matching rows.</li>
 *   <li>Write the resulting dimension rows to Phoenix via {@link DimSinkFunction}.</li>
 * </ol>
 */
public class DimApp {
    public static void main(String[] args) throws Exception {
        // ---- Environment setup: stream environment and parallelism ----
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // ---- Checkpointing: exactly-once every 5s, 60s timeout, retain on cancel,
        //      >=2s between checkpoints, fixed-delay restart, hashmap state backend,
        //      checkpoint storage on the JobManager, Hadoop user for HDFS access ----
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage(new JobManagerCheckpointStorage());
        System.setProperty("HADOOP_USER_NAME", "root");

        // ---- Source 1: business data from Kafka (dimension data lives in the business DB stream) ----
        String topic = "topic_db";
        String groupId = "dim_app_group";
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaConsumer);
        kafkaStrDS.print("kafkaStrDS >>>");

        // Convert the raw JSON strings into JSONObjects.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.map(JSON::parseObject);

        // Simple cleansing: drop malformed records and Maxwell bootstrap markers.
        SingleOutputStreamOperator<JSONObject> filterDS = jsonObjDS.filter(new FilterFunction<JSONObject>() {
            @Override
            public boolean filter(JSONObject jsonObject) throws Exception {
                try {
                    // Probe the "data" field; a malformed payload throws and the record is dropped.
                    jsonObject.getJSONObject("data");
                    String type = jsonObject.getString("type");
                    // BUGFIX: was "bootstap-start" (typo), so bootstrap-start markers leaked through.
                    // "bootstrap-start" / "bootstrap-complete" are Maxwell bootstrap markers,
                    // not business operations, so they must be filtered out.
                    if ("bootstrap-start".equals(type) || "bootstrap-complete".equals(type)) {
                        return false;
                    }
                    return true;
                } catch (Exception e) {
                    // Deliberate best-effort: a record that can't be parsed is dropped, not fatal.
                    e.printStackTrace();
                    return false;
                }
            }
        });

        // ---- Source 2: dimension-table config via Flink CDC.
        //      StartupOptions.initial() takes a full snapshot first, then streams binlog changes. ----
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop101")
                .port(3306)
                .databaseList("edu-realtime_config")
                .tableList("edu-realtime_config.table_process")
                .username("root")
                // NOTE(review): hard-coded credentials — consider externalizing to config/env.
                .password("!QAZ@wsx123")
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();
        DataStreamSource<String> mySqlDS = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source");

        // Broadcast the config so every parallel task of the main stream sees it.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("mapStateDescriptor", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDS = mySqlDS.broadcast(mapStateDescriptor);

        // Connect the main stream with the broadcast config stream.
        BroadcastConnectedStream<JSONObject, String> connectDS = filterDS.connect(broadcastDS);

        // Route dimension rows according to the broadcast config.
        SingleOutputStreamOperator<JSONObject> dimDS = connectDS.process(new TableProcessFunction(mapStateDescriptor));
        dimDS.print(">>>>");

        // Write dimension data to Phoenix.
        dimDS.addSink(new DimSinkFunction());

        env.execute();
    }

}
