package com.atguigu.gmall.realtime.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseApp;
import com.atguigu.gmall.realtime.bean.TableProcess;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.gmall.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.util.*;

/**
 * Dynamic DWD fact-table splitter: routes business rows from the ODS Kafka
 * topic into per-table DWD Kafka topics, according to the routing rules stored
 * in the MySQL table {@code gmall_config.table_process} (consumed via Flink CDC
 * and distributed to all subtasks through broadcast state).
 *
 * @Author lzc
 * @Date 2023/1/4 10:26
 */
public class Dwd_08_DwdBaseDb extends BaseApp {
    public static void main(String[] args) {
        // init(port, parallelism, app name / consumer-group id, source topic)
        new Dwd_08_DwdBaseDb().init(
            3008,
            2,
            "Dwd_08_DwdBaseDb",
            Constant.TOPIC_ODS_DB
        );
    }
    
    /**
     * Job topology: ETL the raw ODS stream, read + broadcast the routing
     * config, join the two, trim unwanted columns, then fan out to Kafka.
     */
    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // 1. ETL: keep only well-formed insert/update JSON rows
        SingleOutputStreamOperator<JSONObject> dataStream = etl(stream);
        // 2. Read the config table via CDC, deserialize rows into TableProcess beans
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);
        // 3. Connect the data stream with the broadcast config stream
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dwdAndTpStream = connect(dataStream, tpStream);
        // 4. Drop the columns not listed in the config's sink_columns
        dwdAndTpStream = delNotNeedColumns(dwdAndTpStream);
        // 5. Write each fact row to the Kafka topic named by its config entry
        writeToKafka(dwdAndTpStream);
    }
    
    /**
     * Sinks every (row, config) pair to Kafka. The destination topic is taken
     * from the TableProcess half of the tuple (resolved inside the sink).
     */
    private void writeToKafka(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> stream) {
        stream.addSink(FlinkSinkUtil.getKafkaSink());
    }
    
    /**
     * Removes from each row every column that is not listed in the config's
     * comma-separated sink_columns; "op_type" (stashed into the row during
     * connect()) is always preserved for downstream use.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> delNotNeedColumns(
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dimAndTpStream) {
        return dimAndTpStream
            .map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
                @Override
                public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> t) throws Exception {
                    JSONObject data = t.f0;
                    List<String> columns = Arrays.asList(t.f1.getSinkColumns().split(","));
                    
                    // keySet() is a live view backed by the JSONObject, so
                    // removeIf deletes the entries from the row itself
                    Set<String> keys = data.keySet();
                    keys.removeIf(key -> !columns.contains(key) && !"op_type".equals(key));
                    return t;
                }
            });
    }
    /**
     * Connects the business-data stream with the broadcast config stream and
     * emits (row, matching TableProcess) pairs. Rows without a matching config
     * (e.g. dimension-table rows) are silently dropped.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(
        SingleOutputStreamOperator<JSONObject> dataStream,
        SingleOutputStreamOperator<TableProcess> tpStream) {
        // 1. Turn the config stream into a broadcast stream.
        // Broadcast-state layout:
        //   key:   "sourceTable:sourceType:sinkExtend" (sinkExtend may be empty)
        //   value: TableProcess
        MapStateDescriptor<String, TableProcess> tpStateDesc = new MapStateDescriptor<>("tpStateDesc", String.class, TableProcess.class);
        BroadcastStream<TableProcess> tpBcStream = tpStream.broadcast(tpStateDesc);
        // 2. Connect the data stream with the broadcast stream
        return dataStream
            .connect(tpBcStream)
            .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
    
                // Preloaded config, keyed like the broadcast state. Fallback
                // for data rows that arrive before the CDC snapshot has been
                // broadcast into state.
                private Map<String, TableProcess> tpMap;
    
                @Override
                public void open(Configuration parameters) throws Exception {
                    // Preload: every parallel subtask loads the full config set via JDBC
                    Connection conn = JdbcUtil.getMysqlConnection();
                    // The query yields multiple rows/columns; each row is
                    // mapped onto one TableProcess bean, e.g.:
                    /*
                        id  name  age
                        1    lisi  10    one bean per row
                        2    zs    11
                        ...
                     */
                    String querySql = "select * from gmall_config.table_process where sink_type=?";
                    Object[] arg = new Object[]{"dwd"};
                    List<TableProcess> tpList = JdbcUtil.queryList(conn, querySql, arg, TableProcess.class, true);
                    // Index the configs in a HashMap for O(1) lookup
                    tpMap = new HashMap<>();
    
                    for (TableProcess tp : tpList) {
                        String key = tp.getSourceTable()
                            + ":" + tp.getSourceType()
                            + ":" + (tp.getSinkExtend() == null ? "" : tp.getSinkExtend());
                        tpMap.put(key, tp);
                    }
                    System.out.println("预加载完成: " + tpMap);
                    
                    // Release the JDBC connection
                    JdbcUtil.closeConnection(conn);
                    
                }
    
                // For each data row, fetch its routing config from the broadcast state
                @Override
                public void processElement(JSONObject obj,
                                           ReadOnlyContext ctx,
                                           Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    String table = obj.getString("table");
                    String type = obj.getString("type");
                    // default key, e.g. "user_info:insert:" (empty sink_extend)
                    String key = table + ":" + type + ":";
                    // coupon_use updates need special handling: the sink_extend part of
                    // the key encodes WHICH status transition this update represents
                    if ("coupon_use".equals(table) && "update".equals(type)) {
                        JSONObject data = obj.getJSONObject("data");
                        JSONObject old = obj.getJSONObject("old");
                        if ("1401".equals(old.getString("coupon_status")) && "1402".equals(data.getString("coupon_status"))) {
                            // coupon attached to an order (status 1401 -> 1402)
                            key += "{\"data\": {\"coupon_status\": \"1402\"}, \"old\": {\"coupon_status\": \"1401\"}}";
                        } else if ("1402".equals(old.getString("coupon_status")) && "1403".equals(data.getString("coupon_status"))) {
                            // coupon consumed at payment (status 1402 -> 1403)
                            key += "{\"data\": {\"used_time\": \"not null\"}}";
                        }
                    }
                    
                    TableProcess tp = state.get(key);
                    // Lookup order: broadcast state first, then the preloaded map
                    if (tp == null) {
                        System.out.println("状态没有找到, 从 map 集合中找");
                        // Not in state yet: the data row may have raced ahead
                        // of the config broadcast
                        tp = tpMap.get(key);
    
                        if (tp == null) {
                            System.out.println(" map 集合也没有找到");
                        }
                    }
                    // obj may be a dimension row rather than a fact row, so tp can be
                    // null; such rows are simply dropped here
                    if (tp != null) {
                        JSONObject data = obj.getJSONObject("data");
                        // "data" alone does not say whether this was an insert or an
                        // update; downstream needs that, so stash the type inside the row
                        data.put("op_type", obj.getString("type"));
                        out.collect(Tuple2.of(data, tp));
                    }
                }
                
                // Store each incoming config record in the broadcast state
                @Override
                public void processBroadcastElement(TableProcess tp,
                                                    Context ctx,
                                                    Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    // Get the writable broadcast state
                    BroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    // key format: sourceTable:sourceType:sinkExtend
                    String key = tp.getSourceTable()
                        + ":" + tp.getSourceType()
                        + ":" + (tp.getSinkExtend() == null ? "" : tp.getSinkExtend());
                    
                    // op=d means the config row was deleted: remove it from both the
                    // broadcast state and the preloaded fallback map
                    if ("d".equals(tp.getOp())) {
                        state.remove(key);
                        tpMap.remove(key);
                    } else {
                        state.put(key, tp);
                    }
                }
            });
        
        
    }
    
    /**
     * Builds the config stream: MySQL CDC first snapshots the whole
     * gmall_config.table_process table, then tails the binlog for changes.
     * Only configs with sink_type == "dwd" are kept.
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {
        // CDC: on startup it first reads a full snapshot of the table, then
        // incremental changes from the binlog
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
            .hostname("hadoop162")
            .port(3306)
            .databaseList("gmall_config") // set captured database, If you need to synchronize the whole database, Please set tableList to ".*".
            .tableList("gmall_config.table_process") // set captured table
            .username("root")
            // NOTE(review): credentials are hard-coded; consider moving them to config
            .password("aaaaaa")
            .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
            .build();
        
        return env
            .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-cdc")
            .map(new MapFunction<String, TableProcess>() {
                @Override
                public TableProcess map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    String op = obj.getString("op");
                    
                    TableProcess tp;
                    if ("d".equals(op)) {
                        // delete: the row's last image lives under "before"
                        tp = obj.getObject("before", TableProcess.class);
                    } else {
                        // u / c / r (update, create, snapshot read): use "after"
                        tp = obj.getObject("after", TableProcess.class);
                        
                    }
                    tp.setOp(op); // carry the change type along on the bean
                    return tp;
                }
            })
            .filter(tp -> "dwd".equals(tp.getSinkType()));  // keep only the dwd routing configs at read time
        
        
    }
    
    /**
     * Basic cleaning of the raw ODS stream. Keeps only records that parse as
     * JSON and satisfy: database == gmall2022, table present, type is
     * insert/update/bootstrap-insert, and data longer than "{}". The
     * "bootstrap-" prefix (presumably Maxwell's historical-sync marker —
     * confirm against the ingestion setup) is stripped so those records
     * become ordinary inserts.
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream
            .filter(new FilterFunction<String>() {
                @Override
                public boolean filter(String value) throws Exception {
                    try {
                        JSONObject obj = JSON.parseObject(value);
                        String type = obj.getString("type");
                        String data = obj.getString("data");
                        // 1. database must be gmall2022  2. table not null
                        // 3. type: insert / update / bootstrap-insert
                        // 4. data, viewed as a string, must be longer than "{}"
                        return "gmall2022".equals(obj.getString("database"))
                            && null != obj.getString("table")
                            && ("insert".equals(type) || "update".equals(type) || "bootstrap-insert".equals(type))
                            && null != data
                            && data.length() > 2;
                    } catch (Exception e) {
                        System.out.println("数据不是合法的 json 数据: " + value);
                        // parse failure: not valid JSON, drop the record
                        return false;
                    }
                    
                }
            })
            .map(json -> JSON.parseObject(json.replace("bootstrap-", "")));
    }
    
}
/*
Config preloading:
    The full set of routing configs is read up front via JDBC.
        1. Where: in open(), once per parallel subtask
        2. How: a plain JDBC query against gmall_config.table_process
        3. Stored as: a HashMap keyed by "sourceTable:sourceType:sinkExtend"
    
    When a business row arrives, look up its config in the broadcast state
    first; only if the state has no entry yet, fall back to the preloaded map.
 */