package com.bw.gmall.realtime.dwd.db.split.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.common.base.BaseApp;
import com.bw.gmall.realtime.common.bean.TableProcessDwd;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.FlinkSinkUtil;
import com.bw.gmall.realtime.common.util.FlinkSourceUtil;
import com.bw.gmall.realtime.common.util.JdbcUtil;
import com.bw.gmall.realtime.dwd.db.split.function.DwdProcessFunction;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * DWD base-db splitting job.
 *
 * <p>Consumes raw change records from the {@code topic_db} Kafka topic, looks up the
 * per-table routing configuration (read via Flink CDC and distributed as broadcast
 * state), projects each record down to the configured sink columns, and writes the
 * result to the matching DWD Kafka topic.
 */
public class DwdBaseDb extends BaseApp {

    public static void main(String[] args) {
        new DwdBaseDb().start(Constant.TOPIC_DB, "test11", 4, 10019);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> dataStreamSource) {

        // 1. Clean the consumed data: drop records that are not valid JSON.
        SingleOutputStreamOperator<JSONObject> etlStream = etl(dataStreamSource);

        // 2. Read the DWD routing-config table via Flink CDC.
        //    Parallelism 1 preserves the order of configuration changes.
        DataStreamSource<String> configSource = env.fromSource(
                FlinkSourceUtil.getMysqlSource(Constant.PROCESS_DATABASE, Constant.PROCESS_DWD_TABLE_NAME),
                WatermarkStrategy.noWatermarks(),
                "cdc_stream").setParallelism(1);

        // 3. Parse the CDC change records into TableProcessDwd beans.
        SingleOutputStreamOperator<TableProcessDwd> configStream = processStream(configSource);

        // 4. Broadcast the config stream and connect it with the main data stream.
        MapStateDescriptor<String, TableProcessDwd> mapStateDescriptor =
                new MapStateDescriptor<>("map", String.class, TableProcessDwd.class);
        BroadcastStream<TableProcessDwd> broadcast = configStream.broadcast(mapStateDescriptor);

        BroadcastConnectedStream<JSONObject, TableProcessDwd> connect = etlStream.connect(broadcast);

        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDwd>> process =
                connect.process(new DwdProcessFunction(mapStateDescriptor));

        // 5. Keep only the columns configured for the sink table.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDwd>> filtered = filterSinkColumns(process);

        // 6. Write the routed records out to Kafka.
        filtered.sinkTo(FlinkSinkUtil.getDwdKafkaSink());
    }

    /**
     * Removes every field from the record's {@code data} object that is not listed in the
     * configuration's {@code sinkColumns}.
     *
     * <p>Fix: the previous implementation used {@code String.contains(key)}, a substring
     * match — e.g. a field {@code id} was kept whenever any configured column such as
     * {@code order_id} contained it. Column names are now matched exactly via a set.
     * Null {@code data} or null {@code sinkColumns} is passed through untouched instead
     * of throwing and failing the job.
     */
    private static SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDwd>> filterSinkColumns(
            SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDwd>> process) {
        return process.map(new MapFunction<Tuple2<JSONObject, TableProcessDwd>, Tuple2<JSONObject, TableProcessDwd>>() {
            @Override
            public Tuple2<JSONObject, TableProcessDwd> map(Tuple2<JSONObject, TableProcessDwd> tuple) throws Exception {
                JSONObject data = tuple.f0.getJSONObject("data");
                String sinkColumns = tuple.f1.getSinkColumns();
                if (data != null && sinkColumns != null) {
                    // Exact-name membership test instead of a substring check.
                    Set<String> allowed = new HashSet<>(Arrays.asList(sinkColumns.split(",")));
                    data.keySet().removeIf(key -> !allowed.contains(key));
                }
                return tuple;
            }
        });
    }

    /**
     * Converts raw CDC change records (JSON strings) into {@link TableProcessDwd} beans.
     *
     * <p>For {@code op == "d"} (delete) the row image lives in {@code before}; for every
     * other op it lives in {@code after}. Malformed records or records whose payload is
     * missing are logged and skipped rather than failing the job (previously a bad record
     * threw from {@code parseObject}/{@code setOp} and killed the pipeline).
     */
    private static SingleOutputStreamOperator<TableProcessDwd> processStream(DataStreamSource<String> streamSource) {
        return streamSource.process(new ProcessFunction<String, TableProcessDwd>() {
            @Override
            public void processElement(String s, ProcessFunction<String, TableProcessDwd>.Context context, Collector<TableProcessDwd> collector) throws Exception {
                if (s == null) {
                    return;
                }
                try {
                    JSONObject jsonObject = JSON.parseObject(s);
                    String op = jsonObject.getString("op");

                    // Deletes carry the row data in "before"; all other ops in "after".
                    String payload = "d".equals(op)
                            ? jsonObject.getString("before")
                            : jsonObject.getString("after");

                    TableProcessDwd tableProcessDwd = JSON.parseObject(payload, TableProcessDwd.class);
                    if (tableProcessDwd != null) {
                        // Carry the operation type along so downstream can react to deletes.
                        tableProcessDwd.setOp(op);
                        collector.collect(tableProcessDwd);
                    }
                } catch (Exception e) {
                    // A single malformed config record must not bring the job down.
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * ETL cleaning step: parses each raw string into a {@link JSONObject}, silently
     * dropping records that are not valid JSON.
     */
    private static SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> dataStreamSource) {
        return dataStreamSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                if (s != null) {
                    try {
                        collector.collect(JSON.parseObject(s));
                    } catch (Exception e) {
                        // Dirty (non-JSON) record: log and discard.
                        e.printStackTrace();
                    }
                }
            }
        });
    }
}
