package com.bw.gmall.reatime.dim.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.common.base.BaseApp;
import com.bw.gmall.realtime.common.bean.TableProcessDim;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.FlinkSourceUtil;
import com.bw.gmall.realtime.common.util.HbaseUtil;
import com.bw.gmall.realtime.common.util.JdbcUtil;
//import com.bw.gmall.realtime.dim.function.DimSinkFunction;
import com.bw.gmall.reatime.dim.function.DimSinkFunction;
import lombok.extern.log4j.Log4j2;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

@Log4j2
public class DimApp extends BaseApp {

    public static void main(String[] args) throws Exception {
        // Port 10001, parallelism 4, consuming Constant.TOPIC_DB.
        // NOTE(review): the 4th argument (presumably the consumer group id)
        // reuses TOPIC_DB — confirm it should not be a dedicated group-id constant.
        new DimApp().start(10001, 4, Constant.TOPIC_DB, Constant.TOPIC_DB);
    }

    /**
     * Pipeline:
     *   1. ETL the raw Maxwell stream.
     *   2. Read the dim config table via CDC and mirror changes to HBase tables.
     *   3. Broadcast the config and connect it with the main stream.
     *   4. Strip columns not declared in the config.
     *   5. Sink dimension rows to HBase.
     */
    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // 1. Clean the main stream.
        SingleOutputStreamOperator<String> etlStream = etl(stream);

        // 2. CDC config stream; also keeps HBase tables in sync with the config.
        SingleOutputStreamOperator<TableProcessDim> configStream = syncHbaseTables(env);

        // 3. Broadcast the config stream and join it with the main stream.
        MapStateDescriptor<String, TableProcessDim> stateDescriptor =
                new MapStateDescriptor<>("broadcast_state", String.class, TableProcessDim.class);
        BroadcastStream<TableProcessDim> broadcastStream = configStream.broadcast(stateDescriptor);
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> connectStream =
                connectStreams(etlStream.connect(broadcastStream), stateDescriptor);

        // 4. Keep only the columns listed in the config row's sinkColumns.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> filterStream =
                filterColumns(connectStream);

        // 5. Write dimension rows to HBase.
        filterStream.addSink(new DimSinkFunction());
    }

    /**
     * Keeps only records from the "gmall" database that carry a non-empty
     * "data" payload and are not bootstrap start/complete markers
     * (bootstrap-insert records are intentionally kept — they carry dim data).
     * Malformed JSON is logged and dropped so the job keeps running.
     */
    private SingleOutputStreamOperator<String> etl(DataStreamSource<String> stream) {
        return stream.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String s, Collector<String> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(s);
                    String type = jsonObject.getString("type");
                    String db = jsonObject.getString("database");
                    String data = jsonObject.getString("data");
                    if ("gmall".equals(db)
                            && !"bootstrap-start".equals(type)
                            && !"bootstrap-complete".equals(type)
                            && data != null && !data.isEmpty()) {
                        collector.collect(s);
                    }
                } catch (Exception e) {
                    // Best-effort cleaning: drop the dirty record but keep the evidence.
                    log.error("Discarding malformed record: {}", s, e);
                }
            }
        });
    }

    /**
     * Reads the dim config table via Flink CDC (parallelism 1 to preserve
     * change order) and mirrors every change to HBase:
     *   d     -> drop the sink table,
     *   c / r -> create the sink table,
     *   u     -> drop and recreate (NOTE: this discards existing dim data).
     * Emits each config row, tagged with its op, for broadcasting downstream.
     */
    private SingleOutputStreamOperator<TableProcessDim> syncHbaseTables(StreamExecutionEnvironment env) {
        DataStreamSource<String> processStream = env.fromSource(
                FlinkSourceUtil.getMysqlSource(Constant.PROCESS_DATABASE, Constant.PROCESS_DIM_TABLE_NAME),
                WatermarkStrategy.noWatermarks(),
                "mysql_source").setParallelism(1);
        // Rich function: an HBase connection is an external resource, so it is
        // opened/closed in the operator lifecycle, not per record.
        return processStream.flatMap(new RichFlatMapFunction<String, TableProcessDim>() {
            private Connection connection;

            @Override
            public void open(Configuration parameters) throws Exception {
                connection = HbaseUtil.getHbaseConnect();
            }

            @Override
            public void close() throws Exception {
                HbaseUtil.closeHBaseConn(connection);
            }

            @Override
            public void flatMap(String s, Collector<TableProcessDim> collector) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(s);
                // Debezium-style op code: c(reate) / r(ead snapshot) / u(pdate) / d(elete)
                String op = jsonObject.getString("op");
                TableProcessDim dim;
                if ("d".equals(op)) {
                    dim = jsonObject.getObject("before", TableProcessDim.class);
                    HbaseUtil.dropHBaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable());
                } else if ("c".equals(op) || "r".equals(op)) {
                    dim = jsonObject.getObject("after", TableProcessDim.class);
                    HbaseUtil.createHBaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable(), dim.getSinkFamily());
                } else {
                    dim = jsonObject.getObject("after", TableProcessDim.class);
                    HbaseUtil.dropHBaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable());
                    HbaseUtil.createHBaseTable(connection, Constant.HBASE_NAMESPACE, dim.getSinkTable(), dim.getSinkFamily());
                }
                dim.setOp(op);
                // BUGFIX: the original never emitted the config row, so the
                // broadcast state stayed empty and downstream only ever saw the
                // JDBC-preloaded snapshot, missing all runtime config changes.
                collector.collect(dim);
            }
        });
    }

    /**
     * Joins the main stream with the broadcast config stream. Records whose
     * table has no config entry are fact-table records and are dropped.
     * Because the two streams have no ordering guarantee, open() preloads a
     * JDBC snapshot of the config table to cover main-stream records that
     * arrive before the broadcast state is populated.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> connectStreams(
            BroadcastConnectedStream<String, TableProcessDim> connectedStream,
            MapStateDescriptor<String, TableProcessDim> stateDescriptor) {
        return connectedStream.process(new BroadcastProcessFunction<String, TableProcessDim, Tuple2<JSONObject, TableProcessDim>>() {
            // sourceTable -> config row; snapshot fallback for early main-stream records.
            private final HashMap<String, TableProcessDim> configCache = new HashMap<>();

            @Override
            public void open(Configuration parameters) throws Exception {
                java.sql.Connection mysqlConnection = JdbcUtil.getMysqlConnection();
                try {
                    List<TableProcessDim> tableProcessDims = JdbcUtil.queryList(mysqlConnection,
                            "select * from gmall2023_config.table_process_dim",
                            TableProcessDim.class, true);
                    for (TableProcessDim tableProcessDim : tableProcessDims) {
                        configCache.put(tableProcessDim.getSourceTable(), tableProcessDim);
                    }
                } finally {
                    // Always release the connection, even if the query fails.
                    JdbcUtil.closeConnection(mysqlConnection);
                }
            }

            @Override
            public void processElement(String s,
                                       BroadcastProcessFunction<String, TableProcessDim, Tuple2<JSONObject, TableProcessDim>>.ReadOnlyContext readOnlyContext,
                                       Collector<Tuple2<JSONObject, TableProcessDim>> collector) throws Exception {
                ReadOnlyBroadcastState<String, TableProcessDim> broadcastState =
                        readOnlyContext.getBroadcastState(stateDescriptor);
                JSONObject jsonObject = JSONObject.parseObject(s);
                String table = jsonObject.getString("table");
                // A miss in the broadcast state means either (1) a fact table,
                // or (2) the config has not been broadcast yet — fall back to
                // the preloaded snapshot to distinguish the two.
                TableProcessDim tableProcessDim = broadcastState.get(table);
                if (tableProcessDim == null) {
                    tableProcessDim = configCache.get(table);
                }
                if (tableProcessDim != null) {
                    collector.collect(Tuple2.of(jsonObject, tableProcessDim));
                }
            }

            @Override
            public void processBroadcastElement(TableProcessDim tableProcessDim,
                                                BroadcastProcessFunction<String, TableProcessDim, Tuple2<JSONObject, TableProcessDim>>.Context context,
                                                Collector<Tuple2<JSONObject, TableProcessDim>> collector) throws Exception {
                BroadcastState<String, TableProcessDim> broadcastState = context.getBroadcastState(stateDescriptor);
                String sourceTable = tableProcessDim.getSourceTable();
                // Keep state and the local snapshot cache consistent with the op.
                if ("d".equals(tableProcessDim.getOp())) {
                    configCache.remove(sourceTable);
                    broadcastState.remove(sourceTable);
                } else {
                    broadcastState.put(sourceTable, tableProcessDim);
                }
            }
        });
    }

    /**
     * Removes every key from the record's "data" object that is not listed in
     * the config row's comma-separated sinkColumns, so only configured
     * dimension columns reach the sink.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> filterColumns(
            SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> connectStream) {
        return connectStream.map(new MapFunction<Tuple2<JSONObject, TableProcessDim>, Tuple2<JSONObject, TableProcessDim>>() {
            @Override
            public Tuple2<JSONObject, TableProcessDim> map(Tuple2<JSONObject, TableProcessDim> tuple) throws Exception {
                JSONObject record = tuple.f0;          // main-stream record, e.g. user_info row
                TableProcessDim config = tuple.f1;     // matching config row
                List<String> columns = Arrays.asList(config.getSinkColumns().split(","));
                JSONObject data = record.getJSONObject("data");
                // Drop any column not declared in the config.
                data.keySet().removeIf(key -> !columns.contains(key));
                return tuple;
            }
        });
    }
}

