package com.atguigu.edu.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.DimSinkFunction;
import com.atguigu.edu.realtime.app.func.TableProcessFunction;
import com.atguigu.edu.realtime.common.EduConfig;
import com.atguigu.edu.realtime.bean.TableProcess;
import com.atguigu.edu.realtime.util.HbaseUtil;
import com.atguigu.edu.realtime.util.KafkaUtils;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.util.Properties;

/**
 * Dimension-layer (DIM) job.
 *
 * <p>Pipeline: business CDC records arrive on Kafka topic {@code topic_db}; the
 * dimension-table configuration is captured from MySQL ({@code edu_config.table_process_dim})
 * via Flink CDC and broadcast to every business subtask. Records belonging to a configured
 * dimension table are written to HBase; the HBase tables themselves are created/dropped to
 * follow the configuration rows.
 */
public class DimApp {

    public static void main(String[] args) {
        // 1. Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 2. Checkpointing: exactly-once, every 5 s. Production tuning (min pause,
        //    failure-rate restart strategy, timeout, externalized checkpoints on HDFS,
        //    HashMap state backend) is intentionally left off for local testing —
        //    re-enable it before deploying.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // 3. Business stream: topic_db parsed to JSON, bootstrap markers dropped.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = readBusinessStream(env);

        // 4. Configuration stream: table_process_dim rows captured via Flink CDC.
        SingleOutputStreamOperator<TableProcess> processDs = readConfigStream(env);
        processDs.print("flinkCDC:");

        // 5. Keep the HBase dimension tables in sync with the configuration rows.
        processDs = syncHbaseTables(processDs);

        // 6. Broadcast the configuration so every business subtask can look it up.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("mapStateDescriptor", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastDS = processDs.broadcast(mapStateDescriptor);

        // 7. Connect business stream with the broadcast configuration.
        BroadcastConnectedStream<JSONObject, TableProcess> connectDs = jsonObjDS.connect(broadcastDS);

        // 8. Keep only records that belong to a configured dimension table.
        SingleOutputStreamOperator<JSONObject> dimDS =
                connectDs.process(new TableProcessFunction(mapStateDescriptor));

        // 9. Write the dimension data to HBase.
        dimDS.addSink(new DimSinkFunction());

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Reads topic {@code topic_db} from Kafka, parses each record to a
     * {@link JSONObject}, and drops malformed records as well as Maxwell
     * bootstrap start/complete markers (they carry no row data).
     */
    private static SingleOutputStreamOperator<JSONObject> readBusinessStream(StreamExecutionEnvironment env) {
        String topic = "topic_db";
        String groupId = "test";
        KafkaSource<String> kafkaSource = KafkaUtils.getKafkaSource(topic, groupId);
        DataStreamSource<String> kafkaSourceDs =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");
        return kafkaSourceDs.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String jsonStr, Context ctx, Collector<JSONObject> out) {
                        try {
                            JSONObject jsonObj = JSON.parseObject(jsonStr);
                            String type = jsonObj.getString("type");
                            // Full-sync bootstrap markers are control records, not data.
                            if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                                out.collect(jsonObj);
                            }
                        } catch (Exception e) {
                            // Malformed JSON is dropped deliberately; log and keep the job alive.
                            e.printStackTrace();
                        }
                    }
                }
        );
    }

    /**
     * Captures the dimension configuration table {@code edu_config.table_process_dim}
     * with Flink CDC (initial snapshot + binlog) and maps each change event to a
     * {@link TableProcess} carrying its Debezium operation code in {@code op}.
     */
    private static SingleOutputStreamOperator<TableProcess> readConfigStream(StreamExecutionEnvironment env) {
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("edu_config") // captured database
                .tableList("edu_config.table_process_dim") // captured table
                .username("root")
                .password("000000")
                // If the server rejects SSL handshakes, pass jdbcProperties with useSSL=false.
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> flinkCDCDS =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "flinkCDC");
        return flinkCDCDS.process(new ProcessFunction<String, TableProcess>() {
            @Override
            public void processElement(String str, Context context, Collector<TableProcess> collector) {
                JSONObject jsonObj = JSON.parseObject(str);
                String op = jsonObj.getString("op");
                // Deletes carry the row image in "before"; create/read/update carry it in "after".
                // Constant-first equals is null-safe if "op" is absent.
                TableProcess tableProcess = "d".equals(op)
                        ? jsonObj.getObject("before", TableProcess.class)
                        : jsonObj.getObject("after", TableProcess.class);
                if (tableProcess == null) {
                    // Defensive: skip events without the expected row image (e.g. tombstones).
                    return;
                }
                // Propagate the operation code so downstream can react to it.
                tableProcess.setOp(op);
                collector.collect(tableProcess);
            }
        });
    }

    /**
     * Mirrors configuration changes onto HBase: creates the dimension table on
     * create/read, drops it on delete, and drop-then-recreates on update so that
     * column-family changes take effect. Forwards every record downstream.
     */
    private static SingleOutputStreamOperator<TableProcess> syncHbaseTables(
            SingleOutputStreamOperator<TableProcess> processDs) {
        return processDs.process(new ProcessFunction<TableProcess, TableProcess>() {
            private Connection conn;

            @Override
            public void open(Configuration parameters) throws Exception {
                // One HBase connection per subtask, held for the operator's lifetime.
                conn = HbaseUtil.getHbaseConnect();
            }

            @Override
            public void close() throws Exception {
                HbaseUtil.closeHbaseConnect(conn);
            }

            @Override
            public void processElement(TableProcess tableProcess, Context context,
                                       Collector<TableProcess> collector) throws Exception {
                String op = tableProcess.getOp();
                String sinkTable = tableProcess.getSinkTable();
                String sinkFamily = tableProcess.getSinkFamily();
                if ("d".equals(op)) {
                    // Configuration row removed: the dimension table goes with it.
                    HbaseUtil.deleteHbaseTables(conn, EduConfig.HBASE_NAMESPACE, sinkTable);
                } else if ("c".equals(op) || "r".equals(op)) {
                    // New row or snapshot read: make sure the table exists.
                    HbaseUtil.createHbaseTables(conn, EduConfig.HBASE_NAMESPACE, sinkTable, sinkFamily.split(","));
                } else {
                    // Update ("u"): drop the old table, then create it with the new definition.
                    HbaseUtil.deleteHbaseTables(conn, EduConfig.HBASE_NAMESPACE, sinkTable);
                    HbaseUtil.createHbaseTables(conn, EduConfig.HBASE_NAMESPACE, sinkTable, sinkFamily.split(","));
                }
                collector.collect(tableProcess);
            }
        });
    }
}
