package groupOne.app.DIM;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import groupOne.app.BaseAppOneStream;
import groupOne.bean.TableProcess;
import groupOne.common.Constant;
import groupOne.util.FlinkSinkUtil;
import groupOne.util.JdbcUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.List;
import java.util.Set;

public class DimAppXiang extends BaseAppOneStream {
    public static void main(String[] args) {
        new DimAppXiang().init(
                2001,
                2,
                "DimApp",
                Constant.TOPIC_ODS_DB
        );
    }

    /**
     * Dimension pipeline: ETL-filter the ODS stream, read the dim-table config via
     * MySQL CDC, broadcast-join the data with the config, project each row to the
     * configured sink columns, and write the result to Phoenix.
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // Parse raw JSON and keep only usable insert/update events
        SingleOutputStreamOperator<JSONObject> etlStream = etl(stream);
        // Read the dimension configuration table through CDC
        SingleOutputStreamOperator<TableProcess> tpStream = readTp(env);
        // Create the dim tables in Phoenix, broadcast the config, connect with the data stream
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connStream = connect(etlStream, tpStream);
        // Keep only the columns declared in the config (plus op_type)
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream = filterData(connStream);
        // Sink the dimension rows to Phoenix
        writeDimToPhoenix(resultStream);
    }

    /** Sinks every (row, config) pair to Phoenix. */
    private void writeDimToPhoenix(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        resultStream.addSink(FlinkSinkUtil.getPhoenixSink());
    }

    /**
     * Removes from each row every key that is neither listed in the config's
     * sinkColumns nor the synthetic "op_type" key (which is kept for the Redis
     * side-cache invalidation step downstream).
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> filterData(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connStream) {
        return connStream
                .map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
                    @Override
                    public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> tuple2) throws Exception {
                        TableProcess tp = tuple2.f1;
                        List<String> columns = Arrays.asList(tp.getSinkColumns().split(","));
                        JSONObject data = tuple2.f0;
                        // keySet() is a live view: removing a key removes the entry from data
                        Set<String> keySet = data.keySet();
                        keySet.removeIf(key -> !columns.contains(key) && !"op_type".equals(key));
                        return tuple2;
                    }
                });
    }

    /**
     * Ensures every configured dim table exists in Phoenix, then broadcasts the
     * config stream and connects it with the data stream, emitting a
     * (row, config) pair for every data record whose source table is configured.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(SingleOutputStreamOperator<JSONObject> etlStream, SingleOutputStreamOperator<TableProcess> tpStream) {
        // FIX: broadcast the *mapped* stream. The original code discarded the result of
        // this map and broadcast tpStream directly, so a config record could reach the
        // broadcast state (and route data to the sink) before its Phoenix table existed.
        SingleOutputStreamOperator<TableProcess> createdStream = tpStream
                .map(new RichMapFunction<TableProcess, TableProcess>() {

                    private Connection phoenixConnect;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        phoenixConnect = JdbcUtil.getPhoenixConnection();
                    }

                    @Override
                    public void close() throws Exception {
                        // FIX: open() may have failed before assigning the connection
                        if (phoenixConnect != null) {
                            phoenixConnect.close();
                        }
                    }

                    // Issues e.g.: create table if not exists t(name varchar, age varchar,
                    //              constraint pk primary key(name)) <extend>
                    @Override
                    public TableProcess map(TableProcess tp) throws Exception {
                        if (phoenixConnect == null) {
                            phoenixConnect = JdbcUtil.getPhoenixConnection();
                        }

                        String tableName = tp.getSinkTable();
                        String columns = tp.getSinkColumns();
                        String sinkPk = tp.getSinkPk();
                        String extend = tp.getSinkExtend();

                        StringBuilder sql = new StringBuilder()
                                .append("create table if not exists ")
                                .append(tableName)
                                .append("(")
                                // every column is stored as varchar in Phoenix
                                .append(columns.replaceAll("[^,]+", "$0 varchar"))
                                .append(", constraint pk primary key(")
                                .append(sinkPk == null ? "id" : sinkPk)
                                .append("))")
                                .append(extend == null ? "" : extend);
                        System.out.println(sql);
                        // FIX: try-with-resources so the statement is closed even when execute() throws
                        try (PreparedStatement ps = phoenixConnect.prepareStatement(sql.toString())) {
                            ps.execute();
                        }
                        return tp;
                    }
                });

        MapStateDescriptor<String, TableProcess> bcDesc = new MapStateDescriptor<>("bcDesc", String.class, TableProcess.class);
        BroadcastStream<TableProcess> bcStream = createdStream.broadcast(bcDesc);

        return etlStream
                .connect(bcStream)
                .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
                    @Override
                    public void processElement(JSONObject obj,
                                               BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>.ReadOnlyContext ctx,
                                               Collector<Tuple2<JSONObject, TableProcess>> collector) throws Exception {
                        ReadOnlyBroadcastState<String, TableProcess> bcState = ctx.getBroadcastState(bcDesc);
                        String table = obj.getString("table");
                        TableProcess tp = bcState.get(table);
                        // Records whose source table is not configured as a dimension are dropped
                        if (tp != null) {
                            JSONObject data = obj.getJSONObject("data");
                            // op_type is used downstream to evict stale rows from the Redis side cache
                            data.put("op_type", obj.getString("type"));
                            collector.collect(Tuple2.of(data, tp));
                        }
                    }

                    @Override
                    public void processBroadcastElement(TableProcess tp,
                                                        BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>.Context ctx,
                                                        Collector<Tuple2<JSONObject, TableProcess>> collector) throws Exception {
                        BroadcastState<String, TableProcess> bcState = ctx.getBroadcastState(bcDesc);
                        // keyed by source table name so processElement can look it up via "table"
                        bcState.put(tp.getSourceTable(), tp);
                    }
                });
    }

    /**
     * Reads the table_process configuration table from MySQL via CDC and maps each
     * change record to a TableProcess bean.
     */
    private SingleOutputStreamOperator<TableProcess> readTp(StreamExecutionEnvironment env) {
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop162")
                .port(3306)
                .databaseList("edu_config")
                .tableList("edu_config.table_process")
                .username("root")
                .password("aaaaaa")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        return env
                .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mySqlSource")
                .map(json -> {
                    JSONObject obj = JSON.parseObject(json);
                    return obj.getObject("after", TableProcess.class);
                })
                // FIX: CDC delete events carry a null "after" field; dropping them prevents
                // an NPE in the Phoenix table-creation map downstream.
                // NOTE(review): a deleted config row is thereby only ignored, not removed
                // from the broadcast state — confirm that is the intended behavior.
                .filter(tp -> tp != null);
    }

    /**
     * Parses the raw JSON stream and keeps only insert/update events (including
     * Maxwell "bootstrap-insert" events) from the 'edu' database that carry a
     * non-empty data payload.
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream
                .map(JSON::parseObject)
                .filter(obj -> {
                    // FIX: guard against records without a "type" field (NPE in the original)
                    String rawType = obj.getString("type");
                    if (rawType == null) {
                        return false;
                    }
                    // treat Maxwell bootstrap events ("bootstrap-insert") like normal inserts
                    String type = rawType.replaceAll("bootstrap-", "");
                    return "edu".equals(obj.getString("database"))
                            && ("insert".equals(type) || "update".equals(type))
                            && obj.getString("data") != null
                            && obj.getString("data").length() > 2; // "{}" has length 2 → empty payload
                });
    }
}
