package com.atguigu.wuliu.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.wuliu.app.func.DimSinkFunction;
import com.atguigu.wuliu.app.func.TableProcessFunction;
import com.atguigu.wuliu.bean.TableProcess;
import com.atguigu.wuliu.common.WuliuConfig;
import com.atguigu.wuliu.utils.HbaseUtil;
import com.atguigu.wuliu.utils.KafkaUtil;
import com.mysql.cj.xdevapi.JsonString;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptor;

import java.util.Comparator;
import java.util.Properties;

/**
 * 维度数据分流导入
 */
/**
 * Dimension-data splitting and loading job.
 *
 * <p>Pipeline: reads the business change stream from Kafka ({@code topic_db}),
 * reads the dimension configuration table from MySQL via Flink CDC, maintains
 * the corresponding HBase tables (create/drop) from that configuration,
 * broadcasts the configuration to the main stream, filters dimension records
 * out of the main stream and writes them to HBase via {@code DimSinkFunction}.
 */
public class DimApp {
    public static void main(String[] args) throws Exception {
        // HDFS/HBase access from this job is performed as this user.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // 1. Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 2. Checkpoint settings
        // 2.1 Enable checkpointing: every 5 s, exactly-once semantics
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 Retain externalized checkpoints when the job is cancelled
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Minimum pause between the end of one checkpoint and the start of the next
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.5 Restart strategy: at most 3 failures within a 30-day window, 3 s delay between restarts
        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // 2.6 State backend — disabled for local runs; enable when running against the cluster.
        /*env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://39.100.74.207:8020/wuliu/ck_text");*/

        // 3. Read the business change stream from Kafka
        String topic = "topic_db";
        String groupId = "dim_groupId";
        DataStreamSource<String> kafkaSourceDS =
                env.fromSource(KafkaUtil.getKafkaSource(topic, groupId), WatermarkStrategy.noWatermarks(), "kafakSource");
        //kafkaSourceDS.print("raw data");
        // Sample record (CDC/Maxwell-style change event — TODO confirm producer format):
        /*{ "database":"wuliu",
        "table":"transport_task_process",
        "type":"update",
        "ts":1692101155,
        "xid":141965,
        "commit":true,
        "data":{"id":86173,"transport_task_id":97087,"cur_distance":58.50,"line_distance":694.00,"create_time":"2023-08-15 08:00:02","update_time":"2023-08-15 08:50:52","is_deleted":"0"},
        "old":{"cur_distance":52.85,
        "update_time":"2023-08-15 08:45:48"}}*/

        // 4. Convert jsonStr -> JSONObject; ETL: drop the full-snapshot
        //    "bootstrap-start" / "bootstrap-complete" marker records.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaSourceDS.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                        try {
                            JSONObject jsonObject = JSON.parseObject(value);
                            String type = jsonObject.getString("type");
                            if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                                out.collect(jsonObject);
                            }
                        } catch (Exception e) {
                            // Best-effort ETL: malformed records are dropped rather than
                            // failing the job, but log them with context so they can be traced.
                            System.err.println("DimApp: dropping malformed record: " + value);
                            e.printStackTrace();
                        }
                    }
                }
        );
        //jsonObjDS.print("data after ETL");

        // 5. Read the dimension configuration table from MySQL via Flink CDC
        Properties props = new Properties();
        props.setProperty("useSSL", "false");
        // NOTE(review): with useSSL=false, MySQL 8 caching_sha2_password logins
        // usually need allowPublicKeyRetrieval=true — verify this connects.
        props.setProperty("allowPublicKeyRetrieval", "false");
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop100")
                .port(3306)
                .databaseList("wuliu_config") // captured database
                .tableList("wuliu_config.wuliu_config_dim") // captured table
                .username("root")
                .password("000000")
                .jdbcProperties(props)
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.initial()) // snapshot first, then binlog
                .build();
        DataStreamSource<String> mysqlSourceDS = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysqlSource");

        //mysqlSourceDS.print("raw config stream");
        // Sample Debezium change event for the config table:
        /* {"before":{"source_table":"adada","sink_table":"fsf","sink_family":"fsf","sink_columns":"fss","sink_row_key":"id"},
        "after":{"source_table":"sfs","sink_table":"fsf","sink_family":"fsf","sink_columns":"fss","sink_row_key":"id"},
        "source":{"version":"1.6.4.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1692149147000,
        "snapshot":"false","db":"wuliu_config","sequence":null,"table":"wuliu_config_dim","server_id":1,"gtid":null,
        "file":"mysql-bin.000007","pos":1539,"row":0,"thread":null,"query":null},
        "op":"u",
        "ts_ms":1692149108834,
        "transaction":null}*/

        // 6. Wrap the configuration event into a TableProcess bean.
        //    For deletes ("d") the payload is in "before"; otherwise in "after".
        SingleOutputStreamOperator<TableProcess> mysqlObjDS = mysqlSourceDS.map(new MapFunction<String, TableProcess>() {
            @Override
            public TableProcess map(String s) throws Exception {
                JSONObject jsonObject = JSON.parseObject(s);
                String op = jsonObject.getString("op");
                TableProcess tableProcess;
                if ("d".equals(op)) {
                    tableProcess = jsonObject.getObject("before", TableProcess.class);
                } else {
                    tableProcess = jsonObject.getObject("after", TableProcess.class);
                }
                tableProcess.setOp(op);
                return tableProcess;
            }
        });
        //mysqlObjDS.print("config bean");

        // 7. Maintain HBase dimension tables from the configuration stream.
        mysqlObjDS = mysqlObjDS.process(
                new ProcessFunction<TableProcess, TableProcess>() {
                    // One HBase connection per subtask, opened/closed with the operator lifecycle.
                    Connection hbaseConn = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        hbaseConn = HbaseUtil.getHbaseConnection();
                    }

                    @Override
                    public void close() throws Exception {
                        HbaseUtil.closeHbaseConnection(hbaseConn);
                    }

                    @Override
                    public void processElement(TableProcess bean,
                                               ProcessFunction<TableProcess, TableProcess>.Context ctx,
                                               Collector<TableProcess> out) throws Exception {
                        // Create/drop HBase tables based on the change-event op code.
                        String op = bean.getOp();
                        String sinkTable = bean.getSinkTable();
                        String[] families = bean.getSinkFamily().split(",");
                        if ("c".equals(op) || "r".equals(op)) {
                            // insert or snapshot read -> create table
                            HbaseUtil.createTable(hbaseConn, WuliuConfig.HBASE_NAMESPACE, sinkTable, families);
                        } else if ("d".equals(op)) {
                            // delete -> drop table
                            HbaseUtil.dropTable(hbaseConn, WuliuConfig.HBASE_NAMESPACE, sinkTable);
                        } else if ("u".equals(op)) {
                            // update -> drop then recreate
                            HbaseUtil.dropTable(hbaseConn, WuliuConfig.HBASE_NAMESPACE, sinkTable);
                            HbaseUtil.createTable(hbaseConn, WuliuConfig.HBASE_NAMESPACE, sinkTable, families);
                        }
                        // NOTE(review): the original forwarded nothing here either; the
                        // downstream broadcast currently receives no elements from this
                        // operator — confirm whether out.collect(bean) was intended.
                    }
                }
        );

        // 8. Broadcast the configuration stream as keyed broadcast state.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<String, TableProcess>("config", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastDS = mysqlObjDS.broadcast(mapStateDescriptor);
        // 9. Connect the main stream with the broadcast configuration.
        BroadcastConnectedStream<JSONObject, TableProcess> connectDS = jsonObjDS.connect(broadcastDS);

        // 10. Filter dimension records out of the connected stream.
        SingleOutputStreamOperator<JSONObject> processDS = connectDS.process(new TableProcessFunction(mapStateDescriptor));
        //processDS.print("final data");
        //id,login_name,nick_name,passwd,real_name,phone_num,email,user_level,birthday,gender,create_time,update_time,is_deleted

        // 11. Write (or delete) the filtered dimension records in HBase, per record type.
        processDS.addSink(new DimSinkFunction());

        env.execute();
    }
}
