package com.atguigu.medical.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.medical.realtime.app.BaseApp;
import com.atguigu.medical.realtime.bean.TableProcess;
import com.atguigu.medical.realtime.common.Constant;
import com.atguigu.medical.realtime.util.FlinkSinkUtil;
import com.atguigu.medical.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.util.*;


/**
 * @Author lzc
 * @Date 2023/4/19 14:19
 */
/**
 * DWD-layer dispatcher job.
 *
 * <p>Reads the raw CDC stream from the ODS Kafka topic, broadcast-joins each
 * record with the {@code medical_config.table_process} configuration table
 * (ingested live via Flink CDC), keeps only configured fact-table records and
 * columns, and writes the result back to Kafka.
 */
@Slf4j
public class DwdBaseDb extends BaseApp {
    public static void main(String[] args) {
        // NOTE(review): the name used to be "DimApp" -- an apparent copy-paste
        // from the dim application. If BaseApp derives the Kafka consumer-group
        // id / job name from this argument, two jobs sharing one name would
        // split topic partitions between them, so it must be unique per job.
        // Confirm consumer-group offset handling before redeploying.
        new DwdBaseDb().init(20001,
                             2,
                             "DwdBaseDb",
                             Constant.TOPIC_ODS_MEDICAL
        );
    }

    /**
     * Wires the pipeline:
     * <ol>
     *   <li>clean the raw CDC records (valid JSON, right database, r/u/c ops)</li>
     *   <li>read the table_process config table via Flink CDC</li>
     *   <li>broadcast-join data with config</li>
     *   <li>drop columns not listed in the config</li>
     *   <li>write each matched record to Kafka</li>
     * </ol>
     *
     * @param env    the Flink execution environment
     * @param stream raw ODS records as JSON strings
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        SingleOutputStreamOperator<JSONObject> etledStream = etl(stream);
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dataTpStream = connect(etledStream, tpStream);
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream = deleteNotNeedColumns(dataTpStream);
        writeToKafka(resultStream);
    }

    /**
     * Sinks each (data, config) pair to Kafka. The concrete topic routing lives
     * in {@code FlinkSinkUtil.getKafkaSink()} -- presumably keyed by
     * {@code TableProcess.getSinkTable()}; confirm there.
     */
    private void writeToKafka(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        resultStream.sinkTo(FlinkSinkUtil.getKafkaSink());
    }

    /**
     * Removes from the JSON payload every key that is neither listed in the
     * config's {@code sink_columns} nor one of the extra fields added by the
     * broadcast join ({@code op_type}, {@code ts}). Mutates the JSONObject in
     * place and re-emits the same tuple.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> deleteNotNeedColumns(
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dataTpStream) {
        return dataTpStream.map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
            @Override
            public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> t) throws Exception {
                JSONObject data = t.f0;
                // A Set gives O(1) membership tests; the original used a List,
                // making the removeIf below O(columns * keys).
                Set<String> keep = new HashSet<>(Arrays.asList(t.f1.getSinkColumns().split(",")));
                keep.add("op_type");
                keep.add("ts");
                // Keep a key only if it is configured (or one of the two extras).
                data.keySet().removeIf(key -> !keep.contains(key));
                return t;
            }
        });
    }

    /**
     * Broadcast-joins the data stream with the configuration stream.
     *
     * <p>Each data record looks up its config by {@code "<table>:<op>"}; records
     * without a matching config are dropped. Matched records are flattened to
     * their {@code after} image plus {@code op_type}/{@code ts} fields.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(
        SingleOutputStreamOperator<JSONObject> dataStream,
        SingleOutputStreamOperator<TableProcess> tpStream) {
        // 1. Turn the config stream into a broadcast stream.
        //    key:   "<source_table>:<source_type>", e.g. "user_info:ALL"
        //    value: the TableProcess config row
        MapStateDescriptor<String, TableProcess> tpStateDesc = new MapStateDescriptor<>("tpState", String.class, TableProcess.class);
        BroadcastStream<TableProcess> tpBcStream = tpStream.broadcast(tpStateDesc);
        // 2. Connect the data stream to the broadcast stream.
        return dataStream
            .connect(tpBcStream)
            .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
                // Fallback config cache, preloaded in open(). It covers data
                // records that arrive before the CDC snapshot has reached the
                // broadcast state; the broadcast state always takes precedence.
                private final HashMap<String, TableProcess> tpMap = new HashMap<>();

                @Override
                public void open(Configuration parameters) throws Exception {
                    // Broadcast state cannot be accessed from open(), so preload
                    // every config row into a plain HashMap instead (one copy
                    // per parallel subtask).
                    Connection conn = JdbcUtil.getMysqlConnection();
                    String sql = "select * from medical_config.table_process";
                    try {
                        List<TableProcess> tps = JdbcUtil.queryList(conn, sql, null, TableProcess.class);
                        for (TableProcess tp : tps) {
                            tpMap.put(getKey(tp.getSourceTable(), tp.getSourceType()), tp);
                        }
                    } finally {
                        // Close even if the query throws -- the original leaked
                        // the connection on failure.
                        JdbcUtil.closeConnection(conn);
                    }
                }

                // Handles one data record: look up its config (broadcast state
                // first, preloaded map as fallback) and emit (data, config).
                @Override
                public void processElement(JSONObject value,
                                           ReadOnlyContext ctx,
                                           Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    String table = value.getJSONObject("source").getString("table");
                    String type = value.getString("op");
                    String key = getKey(table, type);

                    ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    TableProcess tp = state.get(key);
                    if (tp == null) { // not yet in broadcast state: use the preloaded map
                        tp = tpMap.get(key);
                    }
                    // tp == null means this table/op combination is not configured
                    // for the DWD layer; drop the record silently.
                    if (tp != null) {
                        // Emit only the "after" image, enriched with the op and ts.
                        JSONObject data = value.getJSONObject("after");
                        data.put("op_type", value.getString("op"));
                        data.put("ts", value.getLong("ts"));
                        out.collect(Tuple2.of(data, tp));
                    }
                }

                // Handles one config-change record; runs once per parallel
                // subtask for every element of the broadcast stream.
                @Override
                public void processBroadcastElement(TableProcess tp,
                                                    Context ctx,
                                                    Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    BroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);

                    String key = getKey(tp.getSourceTable(), tp.getSourceType());
                    if ("d".equals(tp.getOp())) {
                        // Config row deleted: its sink table is gone too, so drop
                        // the entry from BOTH caches or later writes would target
                        // a missing table.
                        state.remove(key);
                        tpMap.remove(key);
                    } else {
                        // "c"/"r" add a config, "u" replaces one. The broadcast
                        // state is consulted before tpMap, so a stale map entry
                        // is harmlessly shadowed.
                        state.put(key, tp);
                    }
                }

                // Cache key for one config row: "<table>:<type>".
                private String getKey(String table, String type) {
                    return table + ":" + type;
                }
            });
    }

    /**
     * Reads the {@code medical_config.table_process} table with Flink CDC:
     * an initial snapshot followed by live binlog changes. Each raw Debezium
     * JSON record is mapped to a {@link TableProcess} (the "before" image for
     * deletes, the "after" image otherwise) tagged with its op.
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {
        Properties props = new Properties();
        props.setProperty("useSSL", "false");
        // NOTE(review): user/password are hard-coded; move them into Constant
        // (next to MYSQL_HOST) or external configuration.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
            .hostname(Constant.MYSQL_HOST)
            .port(3306)
            .databaseList("medical_config")
            .tableList("medical_config.table_process") // db.table
            .username("root")
            .password("aaaaaa")
            .jdbcProperties(props)
            .serverTimeZone("Asia/Shanghai")
            .startupOptions(StartupOptions.initial()) // full snapshot first, then binlog tailing
            .deserializer(new JsonDebeziumDeserializationSchema()) // SourceRecord -> JSON string
            .build();

        return env
            .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-cdc")
            .map(new MapFunction<String, TableProcess>() {
                @Override
                public TableProcess map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    String op = obj.getString("op");
                    TableProcess tp;
                    if ("d".equals(op)) {
                        // Deletes carry the row image in "before".
                        tp = obj.getObject("before", TableProcess.class);
                    } else {
                        tp = obj.getObject("after", TableProcess.class);
                    }
                    tp.setOp(op);
                    return tp;
                }
            })
            // Keep only fact-table configs (sink_type = "dwd"); the original
            // comment wrongly said "dimension tables".
            .filter(tp -> "dwd".equals(tp.getSinkType()));
    }

    /**
     * Cleans the raw ODS stream: keeps only records that parse as JSON, come
     * from the {@code medical} database, have op r/u/c (deletes are dropped),
     * and carry a non-null "after" image. Malformed records are logged and
     * discarded. Survivors are parsed into JSONObjects.
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream
            .filter(new FilterFunction<String>() {
                @Override
                public boolean filter(String value) throws Exception {
                    try {
                        JSONObject obj = JSON.parseObject(value);
                        String type = obj.getString("op");
                        String database = obj.getJSONObject("source").getString("db");
                        return "medical".equals(database)
                            && ("r".equals(type) || "u".equals(type) || "c".equals(type))
                            && null != obj.getJSONObject("after");
                    } catch (Exception e) {
                        // Parameterized logging avoids eager concatenation;
                        // rendered message is identical to the original.
                        log.warn("数据格式有误,不是正确的 json 数据: {}", value);
                        return false;
                    }
                }
            })
            .map(JSON::parseObject);
    }
}
