package com.atguigu.bigdata.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bigdata.gmall.realtime.app.BaseAppV1;
import com.atguigu.bigdata.gmall.realtime.bean.TableProcess;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.gmall.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

/**
 * @Author lzc
 * @Date 2022/10/13 13:50
 */
public class Dwd_08_BaseDBApp extends BaseAppV1 {
    public static void main(String[] args) {
        new Dwd_08_BaseDBApp().init(
            3008,
            2,
            "Dwd_08_BaseDBApp",
            Constant.TOPIC_ODS_DB
        );
    }
    
    /**
     * Pipeline: clean the raw ods_db change stream, join each record with the
     * broadcast table_process configuration, strip unwanted columns, and fan
     * the records out to their configured Kafka topics.
     */
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // 1. ETL: clean the raw ods_db change records.
        SingleOutputStreamOperator<JSONObject> etledStream = etl(stream);
        // 2. Read the routing configuration from MySQL via CDC.
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);
        // 3. Connect the data stream with the broadcast configuration stream.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream = connect(etledStream, tpStream);
        // 4. Drop the columns that are not listed in the sink configuration.
        resultStream = delNoNeedColumns(resultStream);
        // 5. Route each record to its configured Kafka topic.
        writeToKafka(resultStream);
    }
    
    /**
     * Builds the routing key "sourceTable:sourceType:sinkExtend" (empty string
     * when sinkExtend is null). Shared by the broadcast state and the
     * pre-loaded snapshot map so the two lookups can never drift apart.
     */
    private static String buildKey(TableProcess tp) {
        String extend = tp.getSinkExtend() == null ? "" : tp.getSinkExtend();
        return tp.getSourceTable() + ":" + tp.getSourceType() + ":" + extend;
    }
    
    /**
     * Removes from every data record the columns that are not declared in the
     * matched configuration's comma-separated sinkColumns list.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> delNoNeedColumns(
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        return resultStream.map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
            @Override
            public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> t) throws Exception {
                List<String> wanted = Arrays.asList(t.f1.getSinkColumns().split(","));
                // keySet() is a live view of the JSON object: removing a key
                // removes the whole entry.
                t.f0.keySet().removeIf(c -> !wanted.contains(c));
                return t;
            }
        });
    }
    
    /** Sinks each (data, config) pair to the Kafka topic named in the config. */
    private void writeToKafka(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        resultStream.addSink(FlinkSinkUtil.getKafkaSink());
    }
    
    /**
     * Broadcasts the configuration stream and connects it with the data
     * stream, emitting every data record together with its matched config.
     * A JDBC snapshot of the config table is pre-loaded in open() so records
     * that arrive before the broadcast state is populated can still be routed.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(
        SingleOutputStreamOperator<JSONObject> dataStream,
        SingleOutputStreamOperator<TableProcess> tpStream) {
        
        // 1. Turn the config stream into a broadcast stream.
        MapStateDescriptor<String, TableProcess> tpStateDesc = new MapStateDescriptor<>("tpState", String.class, TableProcess.class);
        BroadcastStream<TableProcess> tpBcStream = tpStream.broadcast(tpStateDesc);
        // 2. Connect the data stream with the broadcast stream.
        return dataStream
            .connect(tpBcStream)
            .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
    
                // Pre-loaded snapshot of the config table, keyed exactly like
                // the broadcast state (see buildKey).
                private HashMap<String, TableProcess> tpMap;
    
                @Override
                public void open(Configuration parameters) throws Exception {
                    Connection conn = JdbcUtil.getMysqlConnection();
                    try {
                        List<TableProcess> tpList = JdbcUtil.queryList(conn,
                                                                       "select * from gmall_config.table_process",
                                                                       null,
                                                                       TableProcess.class,
                                                                       true);
                        tpMap = new HashMap<>();
                        for (TableProcess tp : tpList) {
                            if ("dwd".equals(tp.getSinkType())) {
                                tpMap.put(buildKey(tp), tp);
                            }
                        }
                    } finally {
                        // Close even when the query throws, so the connection
                        // can never leak.
                        JdbcUtil.close(conn);
                    }
                }
    
                @Override
                public void processElement(JSONObject value,
                                           ReadOnlyContext ctx,
                                           Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    // Locate the configuration that matches this change record.
                    String table = value.getString("table");
                    String type = value.getString("type");
                    String key = table + ":" + type + ":";
                    if ("coupon_use".equals(table) && "update".equals(type)) {
                        // coupon_use updates split into two logical events,
                        // distinguished by a sinkExtend suffix appended to the key.
                        JSONObject old = value.getJSONObject("old");
                        JSONObject data = value.getJSONObject("data");
                        if ("1401".equals(old.getString("coupon_status")) && "1402".equals(data.getString("coupon_status"))) {
                            // coupon_status changed 1401 -> 1402
                            key += "{\"data\": {\"coupon_status\": \"1402\"}, \"old\": {\"coupon_status\": \"1401\"}}";
                        } else if (data.getString("used_time") != null) {
                            // used_time became non-null
                            key += "{\"data\": {\"used_time\": \"not null\"}}";
                        }
                    }
                    
                    ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    
                    TableProcess tp = state.get(key);
                    if (tp == null) {
                        // Broadcast state may not be populated yet: fall back
                        // to the snapshot loaded in open().
                        tp = tpMap.get(key);
                    }
                    if (tp != null) {
                        out.collect(Tuple2.of(value.getJSONObject("data"), tp));
                    }
                }
                
                @Override
                public void processBroadcastElement(TableProcess tp,
                                                    Context ctx,
                                                    Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    // Keep the broadcast state in sync with the config table.
                    BroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    String key = buildKey(tp);
                    if ("d".equals(tp.getOp())) {
                        // Config row deleted: drop it from both lookup
                        // structures (instead of putting it first and then
                        // removing it, as the original code did).
                        state.remove(key);
                        tpMap.remove(key);
                    } else {
                        state.put(key, tp);
                    }
                }
            });
    }
    
    /**
     * Reads the gmall_config.table_process table through MySQL CDC (initial
     * snapshot, then binlog), parses each Debezium JSON record into a
     * TableProcess, and keeps only the dwd configurations.
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
            .hostname("hadoop162")
            .port(3306)
            .databaseList("gmall_config") // set captured database, If you need to synchronize the whole database, Please set tableList to ".*".
            .tableList("gmall_config.table_process") // set captured table
            .username("root")
            .password("aaaaaa")
            .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
            // On startup: read a full snapshot first, then follow the binlog
            // for incremental changes.
            .startupOptions(StartupOptions.initial())
            .build();
        
        return env
            .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-source")
            .map(new MapFunction<String, TableProcess>() {
                @Override
                public TableProcess map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    String op = obj.getString("op");
                    // For deletes the row image is in "before"; for snapshot
                    // reads, inserts and updates it is in "after".
                    TableProcess tp = "d".equals(op)
                        ? obj.getObject("before", TableProcess.class)
                        : obj.getObject("after", TableProcess.class);
                    tp.setOp(op);
                    return tp;
                }
            })
            .filter(tp -> "dwd".equals(tp.getSinkType())); // keep only dwd configs
    }
    
    /**
     * Cleans the raw ods_db stream: keeps only well-formed JSON records from
     * database gmall2022 whose type is insert/update and whose data payload
     * has real content. The "bootstrap-" prefix is stripped first so bootstrap
     * records are treated as normal inserts (presumably a Maxwell bootstrap
     * convention — confirm against the producer). Unlike the original
     * filter+map pair, each record is parsed exactly once.
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) {
                try {
                    JSONObject obj = JSON.parseObject(value.replaceAll("bootstrap-", ""));
                    String type = obj.getString("type");
                    String data = obj.getString("data");
                    if ("gmall2022".equals(obj.getString("database"))
                        && ("insert".equals(type) || "update".equals(type))
                        && data != null
                        && data.length() > 2) { // "{}" has length 2: require real content
                        out.collect(obj);
                    }
                } catch (Exception e) {
                    // Malformed JSON: log and drop the record.
                    System.out.println("你的 json 格式数据异常: " + value);
                }
            }
        });
    }
}
/*
交互域
    评论
    收藏
用户域
    用户注册事实表
工具域
    优惠券领用
    优惠券下单
    优惠券支付


动态分流:
    思路和技术和 DimApp 完全一样
    
    1. 数据流 读取 ods_db
    2. 配置流 读取 配置信息
    3. 配置流做成广播流和数据流进行 connect
    4. 数据和配置做成一个元组
    5. 不同的数据写入到不同的 topic 中
    
*/
