package com.practice.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.practice.gmall.realtime.app.BaseApp;
import com.practice.gmall.realtime.bean.TableProcess;
import com.practice.gmall.realtime.common.Constant;
import com.practice.gmall.realtime.util.FlinkSinkUtil;
import com.practice.gmall.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;

/**
 * ClassName: DWD_08_DetailClassify
 * Package: com.practice.gmall.realtime.app.dwd
 * Description:
 * <p>
 * 将明细数据中的工具域、互动域、用户域的数据分流输出到相互独立的Kafka topic中
 * <p>
 * 主要的技术 Flink Stream Connect, Flink CDC, Flink Stateful Computation (Built-in Data Structure / Self-defined Data Structure)
 * <p>
 * 应用Flink状态管理实现预加载机制
 * 维护一个map状态
 * <p>
 * 写法：实现流处理中API中的接口
 *
 * @Author lzy.ethan@qq.com
 * @Create 2023-02-16 17:17
 */
@Slf4j
public class DWD_08_DetailClassify extends BaseApp {

    public static void main(String[] args) {
        // port 4001, job name, parallelism 2, Kafka brokers, source topic
        new DWD_08_DetailClassify().init(4001, "DWD_08_DetailClassify", 2, Constant.KAFKA_BROKERS, "ods_db");
    }

    /**
     * Job pipeline: ETL the raw ods_db stream, broadcast the table_process
     * configuration read via Flink CDC, match data records against the
     * configuration, and fan the matches out to their per-table Kafka topics.
     *
     * @param env    Flink execution environment
     * @param stream raw JSON strings read from the ods_db Kafka topic
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // 1. Filter the stream: drop unwanted and dirty data (bad format / incomplete content)
        SingleOutputStreamOperator<JSONObject> etledStream = etl(stream);

        // 2. Read the TableProcess configuration (Flink CDC, initial snapshot + changelog)
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);

        // 3. Connect the two streams and pair each data record with its sink configuration
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> matchedStream = match(etledStream, tpStream);

        // 4. Each Tuple2 is routed to the Kafka topic named by its TableProcess
        writeToKafka(matchedStream);
    }

    /** Sinks every (data, config) pair to the Kafka topic carried by the config. */
    private void writeToKafka(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> stream) {
        stream.addSink(FlinkSinkUtil.kafkaTuple2SinkFunc());
    }

    /**
     * Broadcasts the configuration stream and joins it with the data stream.
     * <p>
     * To guarantee that configuration is available before the first data record
     * arrives, {@code open()} pre-loads the whole table_process table over JDBC
     * into a per-subtask HashMap; the broadcast state takes precedence once the
     * CDC stream has delivered the entry, and the HashMap serves as fallback.
     *
     * @param dataStream   cleaned business records (Maxwell-style JSON)
     * @param configStream TableProcess changelog from Flink CDC
     * @return stream of (record data, matching TableProcess) pairs
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> match(SingleOutputStreamOperator<JSONObject> dataStream, SingleOutputStreamOperator<TableProcess> configStream) {

        MapStateDescriptor<String, TableProcess> tpStateDec = new MapStateDescriptor<>("Config Info", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastStream = configStream.broadcast(tpStateDec);

        // 1. connect the data stream with the broadcast configuration stream
        return dataStream.connect(broadcastStream)
                .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {

                    // Pre-loaded configuration, used only when the broadcast state
                    // has not yet received the matching entry.
                    private HashMap<String, TableProcess> preloadedConfig;

                    /**
                     * Lifecycle hook, runs once per parallel subtask. Pre-loads the
                     * configuration table so that control info is ready before data arrives.
                     */
                    @Override
                    public void open(Configuration parameters) throws Exception {
                        preloadedConfig = new HashMap<>();
                        Connection conn = JdbcUtil.getJdbcConn(Constant.MYSQL_JDBC_DRIVER, Constant.MYSQL_URL, Constant.MYSQL_ROOT_USER, Constant.MYSQL_ROOT_PASSWORD);
                        try {
                            String querySql = "select * from gmall_config.table_process";
                            List<TableProcess> list = JdbcUtil.queryList(conn, querySql, null, TableProcess.class);
                            for (TableProcess tp : list) {
                                preloadedConfig.put(assembleKey(tp.getSourceTable(), tp.getSourceType(), tp.getSinkExtend()), tp);
                            }
                        } finally {
                            // close the connection even if the query throws
                            JdbcUtil.closeConnection(conn);
                        }
                    }

                    /**
                     * Handles one business record: looks up its TableProcess config
                     * (broadcast state first, pre-loaded map as fallback) and emits
                     * the pair when a config exists.
                     *
                     * @param value one Maxwell-style change record
                     * @param ctx   read-only broadcast context
                     * @param out   collector for matched (data, config) pairs
                     */
                    @Override
                    public void processElement(JSONObject value,
                                               ReadOnlyContext ctx,
                                               Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                        ReadOnlyBroadcastState<String, TableProcess> mapState = ctx.getBroadcastState(tpStateDec);

                        String table = value.getString("table");
                        String type = value.getString("type");
                        String extend = "";

                        // coupon_use updates are disambiguated by an extend marker:
                        //  - status 1401 -> 1402 means the coupon was used when ordering
                        //  - used_time becoming non-null means payment completed
                        if ("coupon_use".equals(table) && "update".equals(type)) {
                            JSONObject data = value.getJSONObject("data");
                            JSONObject old = value.getJSONObject("old");

                            // null guard: tolerate update events without an "old" section
                            if (old != null
                                    && "1401".equals(old.getString("coupon_status"))
                                    && "1402".equals(data.getString("coupon_status"))) {
                                extend = "{\"data\": {\"coupon_status\": \"1402\"}, \"old\": {\"coupon_status\": \"1401\"}}";
                            } else if (data.getString("used_time") != null) {
                                extend = "{\"data\": {\"used_time\": \"not null\"}}";
                            }
                        }

                        String key = assembleKey(table, type, extend);

                        TableProcess tp = mapState.get(key);
                        if (tp == null) {
                            // broadcast entry not delivered yet -> use the pre-loaded snapshot
                            tp = preloadedConfig.get(key);
                        }

                        if (tp != null) {
                            JSONObject data = value.getJSONObject("data");
                            data.put("operate_type", type);
                            out.collect(Tuple2.of(data, tp));
                        }
                    }

                    /**
                     * Maintains the broadcast state (and the pre-loaded fallback map)
                     * from the configuration changelog.
                     */
                    @Override
                    public void processBroadcastElement(TableProcess tp,
                                                        Context ctx,
                                                        Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {

                        BroadcastState<String, TableProcess> mapState = ctx.getBroadcastState(tpStateDec);

                        String key = assembleKey(tp.getSourceTable(), tp.getSourceType(), tp.getSinkExtend());

                        if ("d".equals(tp.getOpType())) {
                            mapState.remove(key);
                            preloadedConfig.remove(key);
                        } else {
                            mapState.put(key, tp);
                            // keep the fallback map coherent so a later state miss
                            // cannot resurrect a stale configuration entry
                            preloadedConfig.put(key, tp);
                        }
                    }

                    /**
                     * Builds the lookup key "table:type[:extend]". An explicit ':'
                     * separator precedes the extend part so that distinct
                     * (type, extend) pairs can never concatenate to the same key.
                     */
                    private String assembleKey(String sourceTableName, String sourceTypeName, String sinkExtend) {
                        String base = sourceTableName + ":" + sourceTypeName;
                        return (sinkExtend == null || sinkExtend.isEmpty()) ? base : base + ":" + sinkExtend;
                    }

                });
    }

    /**
     * Reads the gmall_config.table_process table with Flink CDC (initial snapshot
     * followed by the binlog), converts each Debezium JSON record to a
     * {@link TableProcess} bean tagged with its op type, and keeps only the
     * rows whose sink type is "dwd".
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {

        Properties properties = new Properties();
        properties.setProperty("useSSL", "false");

        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname(Constant.MYSQL_HOST)
                .port(Constant.PORT)
                .databaseList(Constant.CONFIG_DATABASE) // set captured database
                .tableList(Constant.CONFIG_DATABASE + "." + Constant.MYSQL_CONFIG_TABLE) // set captured table
                .username(Constant.MYSQL_ROOT_USER)
                .password(Constant.MYSQL_ROOT_PASSWORD)
                .jdbcProperties(properties)
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .build();

        return env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL configuration Source")
                .map((MapFunction<String, JSONObject>) JSON::parseObject)
                .process(new ProcessFunction<JSONObject, TableProcess>() {
                    @Override
                    public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, TableProcess>.Context context, Collector<TableProcess> out) {
                        String op = jsonObject.getString("op");
                        // u=update, c=create, r=read(snapshot): current row is in "after"
                        if ("u".equals(op) || "c".equals(op) || "r".equals(op)) {
                            TableProcess tp = JSON.parseObject(jsonObject.getString("after"), TableProcess.class);
                            tp.setOpType(op);
                            out.collect(tp);
                        }

                        // d=delete: the removed row is in "before"
                        if ("d".equals(op)) {
                            TableProcess tp = JSON.parseObject(jsonObject.getString("before"), TableProcess.class);
                            tp.setOpType(op);
                            out.collect(tp);
                        }
                    }
                })
                .filter(tp -> "dwd".equals(tp.getSinkType()));
    }

    /**
     * Filters out non-JSON and irrelevant records and parses the survivors.
     * <p>
     * Unlike a filter+map pair, a single ProcessFunction parses each record
     * only once. A record is kept when it comes from database gmall2022, names
     * a table, is an insert or update, and carries a non-empty "data" object
     * (length > 2 excludes "{}").
     *
     * @param stream original data stream read from Kafka
     * @return parsed JSON objects that passed all checks
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String str, Context ctx, Collector<JSONObject> out) {
                try {
                    JSONObject obj = JSON.parseObject(str);
                    String data = obj.getString("data");
                    String type = obj.getString("type");
                    String database = obj.getString("database");
                    String table = obj.getString("table");

                    if ("gmall2022".equals(database)
                            && null != table
                            && ("insert".equals(type) || "update".equals(type))
                            && data != null
                            && data.length() > 2) {
                        out.collect(obj);
                    }
                } catch (Exception e) {
                    log.warn("No JSON format data, failed to  parse data : " + str);
                }
            }
        });
    }
}
