package com.bw.gmall.realtime.dim.App;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.common.Util.FlinkSourceUtil;
import com.bw.gmall.realtime.common.Util.HbaseUtil;
import com.bw.gmall.realtime.common.Util.JdbcUtil;
import com.bw.gmall.realtime.common.base.BaseApp;
import com.bw.gmall.realtime.common.bean.TableProcessDim;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.dim.Function.DimSinkFunction;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;


import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;


/**
 * Dimension-layer application: reads CDC data from the {@code topic_db} Kafka topic,
 * filters out dimension-table records according to a MySQL-backed configuration table
 * (broadcast to all subtasks), trims each record to the configured sink columns, and
 * writes the result to HBase via {@link DimSinkFunction}.
 */
public class DimApp extends BaseApp {

    public static void main(String[] args) throws Exception {
        // port 1001, parallelism 4, consumer group id, topic
        // NOTE(review): group id and topic are both Constant.TOPIC_DB — confirm this is intentional.
        new DimApp().start(1001, 4, Constant.TOPIC_DB, Constant.TOPIC_DB);
    }


    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) throws Exception {

        // 1. ETL: keep only well-formed Maxwell records from the "gmall" database,
        //    dropping bootstrap markers and records without a data payload.
        SingleOutputStreamOperator<String> etlStream = stream.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String s, Collector<String> collector) throws Exception {
                try {
                    JSONObject json = JSON.parseObject(s);
                    String type = json.getString("type");
                    String db = json.getString("database");
                    String data = json.getString("data");
                    if (data != null) {
                        if ("gmall".equals(db) && !"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type) && data.length() > 0) {
                            collector.collect(s);
                        }
                    }
                } catch (Exception e) {
                    // Malformed JSON records are dropped; log and continue.
                    e.printStackTrace();
                }
            }
        });


        // 2. Read the dimension config table via MySQL CDC; parallelism 1 so the
        //    full table snapshot and changelog arrive in order on a single subtask.
        DataStreamSource<String> processStream = env.fromSource(FlinkSourceUtil.getMysqlSource(Constant.PROCESS_DATABASE, Constant.PROCESS_DIM_TABLE_NAME),
                WatermarkStrategy.noWatermarks(), "mysql_source").setParallelism(1);


        // 3. Maintain the corresponding HBase tables as config rows are
        //    created/updated/deleted, and forward each config row downstream.
        SingleOutputStreamOperator<TableProcessDim> createStream = processStream.flatMap(new RichFlatMapFunction<String, TableProcessDim>() {

            private Connection connection;

            @Override
            public void open(Configuration parameters) throws Exception {
                connection = HbaseUtil.getHbaseConnection();
            }

            @Override
            public void close() throws Exception {
                HbaseUtil.closeConnection(connection);
            }

            @Override
            public void flatMap(String s, Collector<TableProcessDim> collector) throws Exception {

                TableProcessDim dim;

                JSONObject json = JSON.parseObject(s);
                // Debezium op codes: c = create, u = update, r = snapshot read, d = delete
                String op = json.getString("op");

                if ("d".equals(op)) {
                    // Config row deleted: the old row lives in "before"; drop its HBase table.
                    dim = json.getObject("before", TableProcessDim.class);
                    HbaseUtil.dropHbaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable());
                } else if ("c".equals(op) || "r".equals(op)) {
                    // FIX: create/read payload is in "after" ("before" is null for these ops).
                    dim = json.getObject("after", TableProcessDim.class);
                    HbaseUtil.createHbaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable(), dim.getSinkFamily());
                } else {
                    // Update: recreate the HBase table with the new definition.
                    dim = json.getObject("after", TableProcessDim.class);
                    HbaseUtil.dropHbaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable());
                    HbaseUtil.createHbaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable(), dim.getSinkFamily());
                }
                dim.setOp(op);
                // FIX: the original never emitted the config row, leaving the
                // broadcast stream empty so no config ever reached downstream.
                collector.collect(dim);
            }
        });


        // 4. Broadcast the config stream and connect it with the main data stream.
        MapStateDescriptor<String, TableProcessDim> state = new MapStateDescriptor<String, TableProcessDim>("broadcast_state", String.class, TableProcessDim.class);

        BroadcastStream<TableProcessDim> broadcast = createStream.broadcast(state);

        BroadcastConnectedStream<String, TableProcessDim> connect = etlStream.connect(broadcast);

        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> connectStream = connect.process(new BroadcastProcessFunction<String, TableProcessDim, Tuple2<JSONObject, TableProcessDim>>() {

            // Pre-loaded config cache: covers the race where main-stream records
            // arrive before the broadcast state has been populated.
            private final HashMap<String, TableProcessDim> hashmap = new HashMap<>();

            @Override
            public void open(Configuration parameters) throws Exception {
                // Load the config table once via plain JDBC at startup.
                java.sql.Connection mySqlConn = JdbcUtil.getMysqlConnection();
                String sql = "select * from gmall2023_config.table_process_dim";
                List<TableProcessDim> tableProcessDims = JdbcUtil.queryList(mySqlConn, sql, TableProcessDim.class, true);

                for (TableProcessDim tableProcessDim : tableProcessDims) {
                    hashmap.put(tableProcessDim.getSourceTable(), tableProcessDim);
                }

                JdbcUtil.closeConnection(mySqlConn);
            }

            @Override
            public void processElement(String s, BroadcastProcessFunction<String, TableProcessDim, Tuple2<JSONObject, TableProcessDim>>.ReadOnlyContext readOnlyContext, Collector<Tuple2<JSONObject, TableProcessDim>> collector) throws Exception {

                // Main stream: look up this record's table in the broadcast state.
                ReadOnlyBroadcastState<String, TableProcessDim> broadcastState = readOnlyContext.getBroadcastState(state);
                JSONObject jsonObject = JSONObject.parseObject(s);
                String table = jsonObject.getString("table");

                TableProcessDim tableProcessDim = broadcastState.get(table);

                // A miss has two possible causes:
                //   1. the record belongs to a fact table (not configured) -> drop it;
                //   2. the main stream raced ahead of the broadcast stream -> fall
                //      back to the JDBC-preloaded cache.
                // FIX: the original condition was inverted (!= null), which clobbered
                // successful state lookups and never consulted the fallback cache.
                if (tableProcessDim == null) {
                    tableProcessDim = hashmap.get(table);
                }
                if (tableProcessDim != null) {
                    collector.collect(Tuple2.of(jsonObject, tableProcessDim));
                }
            }

            @Override
            public void processBroadcastElement(TableProcessDim tableProcessDim, BroadcastProcessFunction<String, TableProcessDim, Tuple2<JSONObject, TableProcessDim>>.Context context, Collector<Tuple2<JSONObject, TableProcessDim>> collector) throws Exception {

                // Broadcast stream: keep the broadcast state (and the local cache)
                // in sync with the config table, keyed by source table name.
                BroadcastState<String, TableProcessDim> broadcastState = context.getBroadcastState(state);

                String sourceTable = tableProcessDim.getSourceTable();
                String op = tableProcessDim.getOp();
                if ("d".equals(op)) {
                    hashmap.remove(sourceTable);
                    broadcastState.remove(sourceTable);
                } else {
                    broadcastState.put(sourceTable, tableProcessDim);
                }
            }
        });


        // 5. Keep only the configured sink columns in each record's "data" payload.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> fiterStream = connectStream.map(new MapFunction<Tuple2<JSONObject, TableProcessDim>, Tuple2<JSONObject, TableProcessDim>>() {
            @Override
            public Tuple2<JSONObject, TableProcessDim> map(Tuple2<JSONObject, TableProcessDim> processDimTuple) throws Exception {

                // f0: main-stream record, e.g.
                // {"database":"gmall","table":"user_info","type":"bootstrap-insert","ts":...,"data":{...}}
                JSONObject f0 = processDimTuple.f0;
                // f1: the matching config row
                TableProcessDim f1 = processDimTuple.f1;

                // Comma-separated column whitelist, e.g. "id,login_name,name,phone_num"
                String sinkColumns = f1.getSinkColumns();
                List<String> columns = Arrays.asList(sinkColumns.split(","));

                JSONObject data = f0.getJSONObject("data");

                // Drop every key not present in the whitelist (mutates in place).
                data.keySet().removeIf(key -> !columns.contains(key));

                return processDimTuple;
            }
        });


        // 6. Write the trimmed dimension records to HBase.
        fiterStream.addSink(new DimSinkFunction());
    }
}
