package com.atguigu.gmall.realtime.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseApp;
import com.atguigu.gmall.realtime.bean.TableProcess;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.gmall.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.util.*;

/**
 * @Author lzc
 * @Date 2023/7/3 11:19
 */
@Slf4j
public class Dwd_09_BaseDb extends BaseApp {
    public static void main(String[] args) {
        // Job name fixed: was "DimApp", a copy-paste leftover from the DIM application.
        // NOTE(review): if this name is also used as the Kafka consumer-group id,
        // changing it resets committed offsets — confirm before deploying.
        new Dwd_09_BaseDb().start(20001,
                                  2,
                                  "Dwd_09_BaseDb",
                                  Constant.TOPIC_ODS_DB
        );
    }
    
    /**
     * Builds the whole pipeline:
     * 1) clean the raw ods_db records,
     * 2) read the routing config table via Flink CDC,
     * 3) connect the data stream with the broadcast config stream,
     * 4) drop columns not listed in sink_columns,
     * 5) write the result to Kafka.
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        SingleOutputStreamOperator<JSONObject> etledStream = etl(stream);
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dimDataToTpStream = connect(etledStream, tpStream);
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream = deleteNotNeedColumns(dimDataToTpStream);
        writeToKafka(resultStream);
    }
    
    /** Sinks every (data, config) pair to Kafka; the topic comes from the config. */
    private void writeToKafka(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        resultStream.sinkTo(FlinkSinkUtil.getKafkaSink());
    }
    
    /**
     * Removes from each record every key that is not listed in the config's
     * sink_columns, keeping the synthetic "op_type" key added in connect().
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> deleteNotNeedColumns(
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dimDataToTpStream) {
        return dimDataToTpStream
            .map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
                @Override
                public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> tp) throws Exception {
                    JSONObject data = tp.f0;
                    // HashSet gives O(1) membership tests; the original ArrayList made
                    // removeIf O(columns * keys) per record.
                    Set<String> columns = new HashSet<>(Arrays.asList(tp.f1.getSinkColumns().split(",")));
                    columns.add("op_type");  // preserve the operation type added in connect()
                    
                    data.keySet().removeIf(key -> !columns.contains(key));
                    return tp;
                }
            });
    }
    
    /**
     * Connects the data stream with the broadcast config stream.
     * <p>
     * Config lives in two places: the broadcast state (kept current by
     * processBroadcastElement) and a HashMap preloaded from MySQL in open(),
     * which covers data records that arrive before the CDC snapshot has
     * reached the broadcast state. Lookup order: broadcast state first,
     * then the preloaded map; a miss in both means the record is not wanted.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(
        SingleOutputStreamOperator<JSONObject> dataStream,
        SingleOutputStreamOperator<TableProcess> tpStream) {
        
        // 1. Turn the config stream into a broadcast stream.
        // key: "table:type", e.g. user_info:ALL
        // value: TableProcess
        MapStateDescriptor<String, TableProcess> desc = new MapStateDescriptor<String, TableProcess>("tp", String.class, TableProcess.class);
        BroadcastStream<TableProcess> tpBcStream = tpStream.broadcast(desc);
        // 2. Connect the data stream with the broadcast stream
        return dataStream
            .connect(tpBcStream)
            .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
                
                // Preloaded config; broadcast state cannot be accessed in open().
                private HashMap<String, TableProcess> map;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    map = new HashMap<>();
                    // Preload all dwd configs from table_process so that data records
                    // arriving before the CDC snapshot can still be routed.
                    java.sql.Connection mysqlConn = JdbcUtil.getMysqlConnection();
                    try {
                        List<TableProcess> tps = JdbcUtil.queryList(mysqlConn,
                                                                    "select * from gmall2023_config.table_process where sink_type=?",
                                                                    new Object[]{"dwd"},
                                                                    TableProcess.class,
                                                                    true
                        );
                        for (TableProcess tp : tps) {
                            map.put(getKey(tp.getSourceTable(), tp.getSourceType()), tp);
                        }
                    } finally {
                        // Close even when the query throws, to avoid leaking the connection.
                        JdbcUtil.closeConnection(mysqlConn);
                    }
                }
                
                // Data side: look up this record's config and emit the pair if found.
                @Override
                public void processElement(JSONObject obj,
                                           ReadOnlyContext ctx,
                                           Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(desc);
                    String key = getKey(obj.getString("table"), obj.getString("type"));
                    TableProcess tp = state.get(key);
                    
                    if (tp == null) {  // not yet in broadcast state: fall back to the preloaded map
                        tp = map.get(key);
                        if (tp != null) {
                            log.debug("config for {} found in preloaded map", key);
                        }
                    } else {
                        log.debug("config for {} found in broadcast state", key);
                    }
                    if (tp != null) { // a config exists, so this record is wanted
                        JSONObject data = obj.getJSONObject("data");
                        data.put("op_type", obj.getString("type"));  // needed downstream
                        out.collect(Tuple2.of(data, tp));
                    }
                }
                
                // Broadcast side: keep the broadcast state (and the fallback map) in sync.
                @Override
                public void processBroadcastElement(TableProcess tp,
                                                    Context ctx,
                                                    Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    BroadcastState<String, TableProcess> state = ctx.getBroadcastState(desc);
                    String key = getKey(tp.getSourceTable(), tp.getSourceType());
                    
                    if ("d".equals(tp.getOp())) {
                        // Config row deleted: remove from BOTH stores, otherwise the
                        // map fallback would keep routing the table.
                        state.remove(key);
                        map.remove(key);
                    } else {
                        // Insert/update/snapshot: state is consulted before the map,
                        // so a now-stale map entry is unreachable and harmless.
                        state.put(key, tp);
                    }
                }
                
                /** Builds the lookup key: "sourceTable:sourceType". */
                private String getKey(String table, String type) {
                    return table + ":" + type;
                }
            });
    }
    
    
    /**
     * Reads table_process via Flink CDC (initial snapshot, then live binlog),
     * maps each change record to a TableProcess — the "before" image for deletes,
     * the "after" image otherwise — and keeps only rows with sink_type "dwd".
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {
        // useSSL=false
        Properties props = new Properties();
        props.setProperty("useSSL", "false");
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
            .hostname(Constant.MYSQL_HOST)
            .port(Constant.MYSQL_PORT)
            .databaseList("gmall2023_config") // captured database; use tableList ".*" for the whole db
            .tableList("gmall2023_config.table_process") // captured table
            .username(Constant.MYSQL_USER_NAME)
            .password(Constant.MYSQL_PASSWORD)
            .jdbcProperties(props)
            .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
            .startupOptions(StartupOptions.initial()) // default: full snapshot first, then binlog changes
            .build();
        
        return env
            .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "cdc-source")
            .setParallelism(1) // single reader keeps the config change order deterministic
            .map(new MapFunction<String, TableProcess>() {
                @Override
                public TableProcess map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    String op = obj.getString("op");
                    // Debezium: "d" carries the row in "before"; r/c/u carry it in "after"
                    TableProcess tp = "d".equals(op)
                        ? obj.getObject("before", TableProcess.class)
                        : obj.getObject("after", TableProcess.class);
                    tp.setOp(op);
                    
                    return tp;
                }
            })
            .filter(tp -> "dwd".equals(tp.getSinkType()));  // keep only fact-table configs
    }
    
    /**
     * Basic cleaning of the raw ods_db stream: keep only parseable JSON records
     * from database "gmall2023" with type insert/update and a non-empty "data"
     * payload (length > 2 rules out an empty "{}" object).
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream
            .filter(new FilterFunction<String>() {
                @Override
                public boolean filter(String value) throws Exception {
                    try {
                        JSONObject obj = JSON.parseObject(value);
                        String db = obj.getString("database");
                        String type = obj.getString("type");
                        String data = obj.getString("data");
                        
                        return "gmall2023".equals(db)
                            && ("insert".equals(type)
                            || "update".equals(type))
                            && data != null
                            && data.length() > 2;
                        
                    } catch (Exception e) {
                        // Malformed JSON is expected occasionally; drop it with a warning.
                        // Parameterized logging avoids eager string concatenation.
                        log.warn("不是正确的 json 格式的数据: {}", value);
                        return false;
                    }
                    
                }
            })
            .map(JSON::parseObject);
    }
}
/*
配置信息的预加载:
    如果配置信息的数据晚于维度数据, 则维度数据会丢失部分.
    
    需要对配置信息的数据进行预加载!!!
    在 open 中,先加载所有的配置信息.  存入到 HashMap 中
    
    现在有两个地方存储配置: 1. 广播状态  2. HashMap 集合
    
    当数据信息来的时候,读取的顺序:
        1. 先读取广播状态
        2. 如果广播状态找不到,再去 HashMap
        3. 如果 HashMap 找不到, 则一定不是需要的维度数据
        
    
    


-----
flink cdc
 r  读取的是快照
    before: null
    after 有值
 d  删除
    before 有值
    after null
 c  新增
    before: null
    after 有值
 u 更新
    before 有值
    after 有值
 
 更新主键:
    先 d 再 c
    
    
 
    
 

----
配置表设计:
source_table   source_type   sink_columns      sink_table       sink_family   sink_row_key    sink_extend
 user_info      insert        id,name         dim_user_info       info          id             .....


---------
目标:     把 ods_db 中的事实表数据过滤出来, 写出到 kafka 中


1. 消费 ods_db 的数据, 做数据清洗,清洗掉脏数据  .....

2. 过滤出需要的维度数据:
    user_info
    
    stream.filter( json -> "user_info".. || ...)....
    
    动态分流
    
    数据流
        ods_db 流里面的数据,有事实表有维度表
        
       
    配置流
        一些配置信息: 需要哪些维度表
        
        做成广播流
        
        让数据流去 connect 广播流
        
            把广播流的数据存储到广播状态
            
            数据流来的时候, 从广播状态读取自己的配置
            
        
        配置信息: 需要的维度表
            存储到 mysql 中
            
            cdc 技术: 把 mysql 中的数据, 直接做成流, 不需要 maxwell
            
 
3. 写出到 kafka 中
        
        
    
    
    
    
    
    


 */