package com.zy.gmall.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.zy.gmall.realtime.app.func.MySinkRichFunction;
import com.zy.gmall.realtime.app.func.TableProcessFunction;
import com.zy.gmall.realtime.bean.TableProcess;
import com.zy.gmall.realtime.common.GmallConfig;
import com.zy.gmall.realtime.util.HbaseUtil;
import com.zy.gmall.realtime.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.util.Properties;

public class DimApp {
    public static void main(String[] args) {
        /**
         * DIM layer entry point:
         * 1. Read change-log data (binlog -> maxwell -> Kafka "topic_db") as the main stream.
         * 2. Read the dimension config table (gmall_config.table_process_dim) via Flink CDC.
         * 3. Sync HBase tables with config changes, broadcast the config stream,
         *    connect it with the main stream, and sink dimension rows into HBase.
         */
        //1 Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Fixed parallelism for the learning/dev setup
        env.setParallelism(4);
        //2 Checkpointing: every 5 s
        env.enableCheckpointing(5000);
        // Restart strategy: failure-rate based (fixed-delay alternative kept for reference)
        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(3)));
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(5)));

        // Exactly-once checkpoint semantics
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Minimum pause between the end of one checkpoint and the start of the next.
        // FIX: the original called setCheckpointTimeout(500) here — a 500 ms timeout
        // (shorter than the 5 s trigger interval) aborts nearly every checkpoint;
        // the accompanying comment showed the minimum pause was what was intended.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        // Checkpoint timeout: 1 minute
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // Tolerated checkpoint failures before the job fails.
        // NOTE(review): 100000 effectively never fails the job on checkpoint errors — confirm intent.
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(100000);
        // Discard externalized checkpoints when the job is cancelled
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(
                CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION
        );
        // State backend / checkpoint storage on HDFS
        //env.setStateBackend(new HashMapStateBackend());// in-memory alternative
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        // HDFS user so checkpoint writes have permission
        System.setProperty("HADOOP_USER_NAME", "atguigu");


        //3 Main stream: maxwell change-log from Kafka topic "topic_db"
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource("topic_db", "dim_app_group");

        DataStreamSource<String> kafkaDs = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "Kafka Source");
        //4 Parse JSON strings into JSONObject, dropping maxwell bootstrap markers
        SingleOutputStreamOperator<JSONObject> jsonObjDs = kafkaDs.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String s, ProcessFunction<String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                // Best-effort dirty-data filter: malformed records are logged and skipped
                try {
                    JSONObject jsonObj = JSON.parseObject(s);
                    String type = jsonObj.getString("type");
                    // "bootstrap-start"/"bootstrap-complete" carry no row data
                    if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                        collector.collect(jsonObj);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        // jsonObjDs.print().name(">>>>");

        //5 Config stream: Flink CDC on the dimension config table
        Properties props = new Properties();
        props.setProperty("useSSL", "true");
        // NOTE(review): credentials are hard-coded; externalize them for production use.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("gmall_config") // set captured database
                .tableList("gmall_config.table_process_dim") // set captured table
                .username("root")
                .password("000000")
                .jdbcProperties(props)
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> flinkCdc = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "Flink CDC");

        //flinkCdc.print().name(">>>");


        // Map CDC JSON into TableProcess config beans
        SingleOutputStreamOperator<TableProcess> tableProcessDS = flinkCdc.process(
                new ProcessFunction<String, TableProcess>() {
                    @Override
                    public void processElement(String jsonStr, Context ctx, Collector<TableProcess> out) throws Exception {
                        // Parse once up front for convenient field access
                        JSONObject jsonObj = JSON.parseObject(jsonStr);
                        // Debezium operation type on the config table
                        String op = jsonObj.getString("op");
                        TableProcess tableProcess;
                        // Null-safe constant-first comparison (op may be absent)
                        if ("d".equals(op)) {
                            // Delete: the config lives in the "before" image
                            tableProcess = jsonObj.getObject("before", TableProcess.class);
                        } else {
                            // Insert/update/snapshot read: config lives in the "after" image
                            tableProcess = jsonObj.getObject("after", TableProcess.class);
                        }
                        tableProcess.setOp(op);
                        out.collect(tableProcess);
                    }
                }
        );
        //tableProcessDS.print(">>>");

        //6 Create/drop HBase tables according to config changes
        tableProcessDS = tableProcessDS.map(
                new RichMapFunction<TableProcess, TableProcess>() {

                    // HBase connection, opened once per subtask lifecycle
                    private Connection conn;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        conn = HbaseUtil.getConnect();
                    }

                    @Override
                    public void close() throws Exception {
                        HbaseUtil.closeHbase(conn);
                    }

                    @Override
                    public TableProcess map(TableProcess tableProcess) throws Exception {
                        String op = tableProcess.getOp();
                        // Target HBase table name
                        String sinkTable = tableProcess.getSinkTable();
                        // Column families, comma-separated
                        String sinkFamily = tableProcess.getSinkFamily();

                        if ("u".equals(op)) {
                            // Update: drop and recreate the table
                            HbaseUtil.deleteHbaseTable(conn, GmallConfig.HBASE_NAMESPACE, sinkTable);
                            HbaseUtil.createHbaseTable(conn, GmallConfig.HBASE_NAMESPACE, sinkTable, sinkFamily.split(","));
                        } else if ("d".equals(op)) {
                            // Delete: drop the table
                            HbaseUtil.deleteHbaseTable(conn, GmallConfig.HBASE_NAMESPACE, sinkTable);
                        } else {
                            // Insert / snapshot read: create the table
                            HbaseUtil.createHbaseTable(conn, GmallConfig.HBASE_NAMESPACE, sinkTable, sinkFamily.split(","));
                        }
                        return tableProcess;
                    }
                });
        //tableProcessDS.print(">>>>");

        //7 Broadcast the config stream
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("mapStateDescriptor", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastStream = tableProcessDS.broadcast(mapStateDescriptor);
        //8 Connect the main stream with the broadcast config stream
        BroadcastConnectedStream<JSONObject, TableProcess> connectedStream = jsonObjDs.connect(broadcastStream);
        //9 Filter dimension rows using the broadcast config
        SingleOutputStreamOperator<JSONObject> process = connectedStream.process(
                new TableProcessFunction(mapStateDescriptor)
        );

        //10 Sink into HBase ("d" -> delete, otherwise upsert)
        process.print(">>>");
        process.addSink(new MySinkRichFunction());
        try {
            env.execute("DimApp");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
