package com.bw.gmall.realtime.dwd.db.split.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.common.base.BaseApp;
import com.bw.gmall.realtime.common.bean.TableProcessDim;
import com.bw.gmall.realtime.common.bean.TableProcessDwd;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.FlinkSinkUtil;
import com.bw.gmall.realtime.common.util.FlinkSourceUtil;
import com.bw.gmall.realtime.common.util.JdbcUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;

/**
 * DWD base-DB splitting job.
 *
 * <p>Reads the raw business-DB change stream ({@code topic_db}, Maxwell format) from Kafka,
 * joins it against a broadcast routing-config table (read via Flink CDC from MySQL), keeps
 * only the configured sink columns, and fans each record out to its target DWD Kafka topic.
 */
public class DwdBaseDb extends BaseApp {
    public static void main(String[] args) throws Exception {
        new DwdBaseDb().start(10019, 4, Constant.DWD_BASE_DB, Constant.TOPIC_DB);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // 1. ETL: keep only records that parse as valid JSON. One bad record must
        //    not fail the job, so malformed input is logged and dropped (best effort).
        SingleOutputStreamOperator<JSONObject> etlStream = stream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                try {
                    collector.collect(JSON.parseObject(s));
                } catch (Exception e) {
                    // Include the offending payload so dirty data can be diagnosed.
                    System.out.println("出现异常数据: " + s);
                }
            }
        });
//        etlStream.print();

        // 2. Read the DWD routing-config table as a CDC change stream.
        DataStreamSource<String> mysqlSource = env.fromSource(
                FlinkSourceUtil.getMysqlSource(Constant.PROCESS_DATABASE, Constant.PROCESS_DWD_TABLE_NAME),
                WatermarkStrategy.noWatermarks(),
                "mysql_source");

        // 3. Convert each CDC change record into a TableProcessDwd bean.
        //    For deletes ("d") the row image is in "before"; otherwise in "after".
        SingleOutputStreamOperator<TableProcessDwd> tableProcess = mysqlSource.flatMap(new FlatMapFunction<String, TableProcessDwd>() {
            @Override
            public void flatMap(String s, Collector<TableProcessDwd> collector) throws Exception {
                JSONObject jsonObject = JSON.parseObject(s);
                String op = jsonObject.getString("op");
                String rowImage = "d".equals(op) ? jsonObject.getString("before") : jsonObject.getString("after");
                TableProcessDwd tableProcessDwd = JSONObject.parseObject(rowImage, TableProcessDwd.class);
                tableProcessDwd.setOp(op);
                collector.collect(tableProcessDwd);
            }
        });

        // 4. Broadcast the config stream so every parallel subtask sees all rules.
        MapStateDescriptor<String, TableProcessDwd> stateDescriptor =
                new MapStateDescriptor<>("broadcast_state", String.class, TableProcessDwd.class);
        BroadcastStream<TableProcessDwd> broadcastStream = tableProcess.broadcast(stateDescriptor);

        // 5. Connect main stream and broadcast stream; tag each data record with its
        //    matching routing rule, keyed by "<sourceTable>-<sourceType>".
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDwd>> connectStream = etlStream
                .connect(broadcastStream)
                .process(new BroadcastProcessFunction<JSONObject, TableProcessDwd, Tuple2<JSONObject, TableProcessDwd>>() {
                    // JDBC-preloaded fallback cache, consulted while the broadcast state
                    // is still empty (data records can arrive before broadcast ones).
                    private HashMap<String, TableProcessDwd> hashMap;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        hashMap = new HashMap<>();
                        Connection mysqlConnection = JdbcUtil.getMysqlConnection();
                        try {
                            List<TableProcessDwd> tableProcessDwds = JdbcUtil.queryList(
                                    mysqlConnection,
                                    "select * from gmall2023_config.table_process_dwd",
                                    TableProcessDwd.class,
                                    true);
                            for (TableProcessDwd tableProcessDwd : tableProcessDwds) {
                                // FIX: key must match the "<table>-<type>" lookup key used in
                                // processElement; keying by source table alone meant this
                                // fallback cache could never be hit.
                                hashMap.put(tableProcessDwd.getSourceTable() + "-" + tableProcessDwd.getSourceType(),
                                        tableProcessDwd);
                            }
                        } finally {
                            // FIX: close the connection even if the query throws.
                            JdbcUtil.closeConnection(mysqlConnection);
                        }
                    }

                    @Override
                    public void processElement(JSONObject jsonObject, BroadcastProcessFunction<JSONObject, TableProcessDwd, Tuple2<JSONObject, TableProcessDwd>>.ReadOnlyContext readOnlyContext, Collector<Tuple2<JSONObject, TableProcessDwd>> collector) throws Exception {
                        // Look the rule up in broadcast state first.
                        ReadOnlyBroadcastState<String, TableProcessDwd> broadcastState = readOnlyContext.getBroadcastState(stateDescriptor);
                        String key = jsonObject.getString("table") + "-" + jsonObject.getString("type");
                        TableProcessDwd tableProcessDwd = broadcastState.get(key);
                        // Broadcast state not populated yet -> fall back to the JDBC cache.
                        if (tableProcessDwd == null) {
                            tableProcessDwd = hashMap.get(key);
                        }
                        // Records with no matching rule are not wanted downstream; drop them.
                        if (tableProcessDwd != null) {
                            collector.collect(Tuple2.of(jsonObject, tableProcessDwd));
                        }
                    }

                    @Override
                    public void processBroadcastElement(TableProcessDwd tableProcessDwd, BroadcastProcessFunction<JSONObject, TableProcessDwd, Tuple2<JSONObject, TableProcessDwd>>.Context context, Collector<Tuple2<JSONObject, TableProcessDwd>> collector) throws Exception {
                        // Apply the config change to broadcast state (and the fallback cache
                        // on delete, so a removed rule stops matching immediately).
                        BroadcastState<String, TableProcessDwd> broadcastState = context.getBroadcastState(stateDescriptor);
                        String key = tableProcessDwd.getSourceTable() + "-" + tableProcessDwd.getSourceType(); // e.g. favor_info-insert
                        if ("d".equals(tableProcessDwd.getOp())) {
                            broadcastState.remove(key);
                            hashMap.remove(key);
                        } else {
                            broadcastState.put(key, tableProcessDwd);
                        }
                    }
                });

        // 6. Keep only the columns listed in the rule's sink_columns.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDwd>> filterColumnStream = connectStream.map(new MapFunction<Tuple2<JSONObject, TableProcessDwd>, Tuple2<JSONObject, TableProcessDwd>>() {
            @Override
            public Tuple2<JSONObject, TableProcessDwd> map(Tuple2<JSONObject, TableProcessDwd> value) throws Exception {
                // Maxwell row payload; may be absent for control records (e.g. bootstrap
                // markers), so guard against NPE instead of failing the job.
                JSONObject data = value.f0.getJSONObject("data");
                String sinkColumns = value.f1.getSinkColumns();
                if (data != null && sinkColumns != null) {
                    // FIX: match whole column names. The previous substring check
                    // (sinkColumns.contains(key)) falsely kept e.g. "id" whenever
                    // "user_id" was configured.
                    List<String> allowed = Arrays.asList(sinkColumns.split(","));
                    Set<String> keys = data.keySet();
                    keys.removeIf(key -> !allowed.contains(key));
                }
                return value;
            }
        });
        filterColumnStream.print();

        // 7. Write each record to its target Kafka topic (topic chosen per record).
        filterColumnStream.sinkTo(FlinkSinkUtil.getKafkaWithoNoTopicSink());
    }
}
