package com.atguigu.edu.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.DimFuntion;
import com.atguigu.edu.realtime.app.func.DimHbaseSinkFunction;
import com.atguigu.edu.realtime.beans.TableProcess;
import com.atguigu.edu.realtime.common.EduConfig;
import com.atguigu.edu.realtime.utils.HbaseUtil;
import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.util.Properties;

// DIM-layer data processing: routes dimension records from Kafka to HBase, driven by a CDC config table.
public class DimApp {

    /**
     * Entry point of the DIM-layer Flink job.
     *
     * Pipeline:
     *  1. Read the business change-log stream from Kafka topic "topic_db".
     *  2. Read the dimension configuration table (edu_config.table_process_dim)
     *     from MySQL via Flink CDC as a broadcast stream.
     *  3. Maintain the corresponding HBase tables (create/drop) as the config changes.
     *  4. Connect main stream and broadcast config, filter out dimension rows,
     *     and sink them to HBase.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        //TODO 1. Prepare the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(5);
        //TODO 2. Checkpoint configuration (disabled for local development)
/*        env.enableCheckpointing(5000l);
        env.getCheckpointConfig().setCheckpointTimeout(60000l);
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000l);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(5)));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/edu/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");*/
        //TODO 3. Read the main stream from Kafka topic "topic_db"
        DataStreamSource<String> kafkaJsonStr = env.fromSource(MyKafkaUtil.getKafkaSource("topic_db", "dim_app_group"),
                WatermarkStrategy.noWatermarks(), "kafkasource");
        //kafkaJsonStr.print();
        //TODO 4. Filter the main stream and convert it to JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDs = kafkaJsonStr.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String jsonStr, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject jsonObj = JSON.parseObject(jsonStr);
                        String type = jsonObj.getString("type");
                        // Drop bootstrap marker records; they carry no row data.
                        if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                            out.collect(jsonObj);
                        }
                    }
                }
        );
        //jsonObjDs.print(">>>>>>>>");
        //TODO 5. Read the config stream from MySQL via Flink CDC
        Properties props = new Properties();
        props.setProperty("useSSL", "false");

        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("edu_config") // set captured database
                .tableList("edu_config.table_process_dim") // set captured table
                .username("root")
                .password("000000")
                .jdbcProperties(props)
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> cdcDs = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "cdc");
        //cdcDs.print("+++++++++");
        //TODO 6. Convert the config stream to TableProcess beans
        SingleOutputStreamOperator<TableProcess> tableProcessDs = cdcDs.process(
                new ProcessFunction<String, TableProcess>() {
                    @Override
                    public void processElement(String jsonStr, ProcessFunction<String, TableProcess>.Context ctx, Collector<TableProcess> out) throws Exception {
                        JSONObject jsonObj = JSON.parseObject(jsonStr);
                        String op = jsonObj.getString("op");
                        TableProcess tableProcess = null;
                        if ("d".equals(op)) {
                            // Debezium delete events carry the removed row in "before"
                            // (bug fix: key was previously misspelled "befire", which made
                            // every delete produce null and NPE on setOp below).
                            tableProcess = jsonObj.getObject("before", TableProcess.class);
                        } else {
                            // create ("c"), snapshot read ("r") and update ("u") carry the row in "after"
                            tableProcess = jsonObj.getObject("after", TableProcess.class);
                        }
                        tableProcess.setOp(op);
                        out.collect(tableProcess);
                    }
                }
        );
        //tableProcessDs.print("aaa");
        //TODO 7. Create or drop HBase tables according to the config stream
        tableProcessDs = tableProcessDs.process(
                new ProcessFunction<TableProcess, TableProcess>() {

                    private Connection connection;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        connection = HbaseUtil.getHbaseConnection();
                    }

                    @Override
                    public void close() throws Exception {
                        HbaseUtil.closeHbaseConnection(connection);
                    }

                    @Override
                    public void processElement(TableProcess orderBean, ProcessFunction<TableProcess, TableProcess>.Context ctx, Collector<TableProcess> out) throws Exception {
                        String op = orderBean.getOp();
                        String sinkTable = orderBean.getSinkTable();
                        String sinkFamily = orderBean.getSinkFamily();
                        // Bug fix: removed a stray unconditional dropTable() that ran before
                        // this dispatch and deleted the HBase table on EVERY config event,
                        // including create/snapshot, wiping existing dimension data.
                        if ("d".equals(op)) {
                            // config row deleted -> drop the HBase table
                            HbaseUtil.dropTable(connection, EduConfig.HBASE_NAMESPACE, sinkTable);
                        } else if ("c".equals(op) || "r".equals(op)) {
                            // config row created / read from snapshot -> create the HBase table
                            HbaseUtil.createHbaseTable(connection, EduConfig.HBASE_NAMESPACE, sinkTable, sinkFamily);
                        } else {
                            // config row updated -> drop and recreate with the new column family
                            HbaseUtil.dropTable(connection, EduConfig.HBASE_NAMESPACE, sinkTable);
                            HbaseUtil.createHbaseTable(connection, EduConfig.HBASE_NAMESPACE, sinkTable, sinkFamily);
                        }
                        out.collect(orderBean);
                    }
                }
        );

        //TODO 8. Turn the config stream into a broadcast stream
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<String, TableProcess>("mapStateDescriptor", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastDs = tableProcessDs.broadcast(mapStateDescriptor);
        //TODO 9. Connect the main stream with the broadcast config stream
        BroadcastConnectedStream<JSONObject, TableProcess> connectDs = jsonObjDs.connect(broadcastDs);
        //TODO 10. Process the connected stream and keep only dimension rows
        SingleOutputStreamOperator<JSONObject> joinDs = connectDs.process(
                new DimFuntion(mapStateDescriptor)
        );
        joinDs.print();
        //TODO 11. Write the dimension data to HBase
        // (comment fix: the sink is DimHbaseSinkFunction/HBase, not a Kafka topic)
        joinDs.addSink(new DimHbaseSinkFunction());
        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
