package com.raylu.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.raylu.realtime.bean.MaxwellBean;
import com.raylu.realtime.bean.TableProcessBean;
import com.raylu.realtime.utils.KafkaSinkUtil;
import com.raylu.realtime.utils.KafkaSourceUtil;
import com.raylu.realtime.utils.PropertiesUtil;
import com.raylu.realtime.utils.RedisUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.*;
import java.util.function.Predicate;

/**
 * Description: DWD-layer job that routes Maxwell/CDC business change events by a broadcast
 * configuration stream — fact rows to Kafka DWD topics, dimension rows to HBase via Phoenix.
 * <p>
 * Created by lucienoz on 2021/12/26.
 * Copyright © 2021 lucienoz. All rights reserved.
 */
public class BaseDbApp {

    public static void main(String[] args) throws Exception {
        Properties prop = PropertiesUtil.load("config.properties");
        //TODO 1. 基础环境准备
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        //TODO 1.1. 设置并行度
        env.setParallelism(4);

        //TODO 1.2. 设置检查点
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.setStateBackend(new FsStateBackend(prop.getProperty("base.db.app.fsstatebackend.url")));
//        env.setStateBackend(new MemoryStateBackend());
//        env.setStateBackend(new EmbeddedRocksDBStateBackend());
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        System.setProperty("HADOOP_USER_NAME", "raylu");

        //TODO 1.3. 应用重启策略
//        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(3L)));

        //TODO 2. 创建业务数据流
        SingleOutputStreamOperator<MaxwellBean> mainStream = env
                .addSource(KafkaSourceUtil.getKafkaSource(prop.getProperty("base.db.app.kafka.topic"), prop.getProperty("base.db.app.kafka.group-id")))
                .map(r -> {
                    return JSON.parseObject(r, MaxwellBean.class);
                });

        //TODO 3. 创建业务数据配置流
        DataStreamSource<String> propStream = env.addSource(MySQLSource.<String>builder()
                .hostname(prop.getProperty("base.db.app.cdc.mysql.hostname"))
                .databaseList(prop.getProperty("base.db.app.cdc.mysql.databaseList").split(","))
                .username(prop.getProperty("base.db.app.cdc.mysql.username"))
                .password(prop.getProperty("base.db.app.cdc.mysql.password"))
                .tableList(prop.getProperty("base.db.app.cdc.mysql.tableList").split(","))
                .startupOptions(StartupOptions.initial())
                .deserializer(new DebeziumDeserializationSchema<String>() {
                    @Override
                    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
                                 /*
                                 "database": "gmall_2021",
                                 "table": "order_detail",
                                 "type": "insert",
                                 "ts": 1640680977,
                                 "data":{},
                                 "old": {},
                                 */
                        JSONObject maxwellJSONObject = new JSONObject();
                        Struct value = (Struct) sourceRecord.value();
                        String db = value.getStruct("source").getString("db");
                        maxwellJSONObject.put("database", db);
                        String tableName = value.getStruct("source").getString("table");
                        maxwellJSONObject.put("table", tableName);
                        Long ts = value.getInt64("ts_ms") / 1000L;
                        maxwellJSONObject.put("ts", ts);
                        String op = value.getString("op");
                                /*
                                 READ("r"),CREATE("c"),UPDATE("u"),DELETE("d");
                                 */
                        String type = "";
                        switch (op) {
                            case "r":
                                type = "select";
                                break;
                            case "c":
                                type = "insert";
                                break;
                            case "u":
                                type = "update";
                                break;
                            case "d":
                                type = "delete";
                                break;
                        }
                        maxwellJSONObject.put("type", type);

                        JSONObject dataJSONObject = new JSONObject();
                        Struct dataStruct = value.getStruct("after");
                        List<Field> fields = dataStruct.schema().fields();
                        for (Field field : fields) {
                            dataJSONObject.put(field.name(), dataStruct.getString(field.name()));
                        }

                        maxwellJSONObject.put("data", dataJSONObject);

                        collector.collect(maxwellJSONObject.toJSONString());


                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return Types.STRING;
                    }
                })
                .port(3306)
                .build());

        //TODO 4. 业务流与配置流进行connect，配置流进行广播
        //TODO 4.1. 配置流进行广播
        BroadcastStream<String> broadcastStream = propStream.broadcast(new MapStateDescriptor<String, TableProcessBean>("table-process", Types.STRING, Types.POJO(TableProcessBean.class)));
        //TODO 4.2. 主流与广播流进行关联
        SingleOutputStreamOperator<JSONObject> processStream = mainStream.connect(broadcastStream)
                .process(new BroadcastProcessFunction<MaxwellBean, String, JSONObject>() {
                    private Connection connection;

                    //TODO 4.3. 广播流将流数据映射到广播流状态变量中
                    @Override
                    public void processBroadcastElement(String value, BroadcastProcessFunction<MaxwellBean, String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                        BroadcastState<String, TableProcessBean> tableProcessBeanBroadcastState = ctx.getBroadcastState(new MapStateDescriptor<String, TableProcessBean>("table-process", Types.STRING, Types.POJO(TableProcessBean.class)));
                        MaxwellBean maxwellBean = JSON.parseObject(value, MaxwellBean.class);
                        TableProcessBean tableProcessBean = JSON.parseObject(maxwellBean.data, TableProcessBean.class);
                        String operateType = tableProcessBean.operateType; //insert update delete
                        String sourceTable = tableProcessBean.sourceTable;
                        if (!"delete".equals(operateType)) {
                            //TODO 4.3.1. 广播流将insert和update的配置表信息更新到广播流变量中
                            tableProcessBeanBroadcastState.put(sourceTable, tableProcessBean);
                        } else {
                            //TODO 4.3.2. 广播流将配置表中delete数据进行移除
                            tableProcessBeanBroadcastState.remove(sourceTable);
                        }

                        //TODO 4.3.3. 判断广播流sink_type为hbase数据在hbase中是否建立，若未建立则建立
                        if ("hbase".equals(tableProcessBean.getSinkType())) {
                            StringBuilder stringBuilder = new StringBuilder();
                            stringBuilder.append("create table if not exists " + prop.getProperty("base.db.app.cdc.phoenix.schem") + "." + tableProcessBean.getSinkTable() + " (\n");
                            //判断PK是联合主键还是单键
                            String[] pKs = tableProcessBean.getSinkPk().split(",");
                            String[] columns = tableProcessBean.getSinkColumns().split(",");
                            if (pKs.length > 1) {

                                for (String column : columns) {
                                    stringBuilder.append(column + " varchar, \n");
                                }
                                stringBuilder.append("CONSTRAINT " + tableProcessBean.getSinkTable() + "_PK PRIMARY KEY (" + tableProcessBean.getSinkPk() + ")\n");
                            } else {
                                for (int i = 0; i < columns.length - 1; i++) {
                                    stringBuilder.append(columns[i] + " varchar");
                                    if (columns[i].equals(tableProcessBean.getSinkPk())) {
                                        stringBuilder.append(" primary key,\n");
                                    } else {
                                        stringBuilder.append(",\n");
                                    }
                                }
                                stringBuilder.append(columns[columns.length - 1] + " varchar");
                                if (columns[columns.length - 1].equals(tableProcessBean.getSinkPk())) {
                                    stringBuilder.append(" primary key \n");
                                } else {
                                    stringBuilder.append("\n");
                                }
                            }
                            stringBuilder.append(")" + (tableProcessBean.getSinkExtend() == null ? "" : tableProcessBean.getSinkExtend()));

                            System.out.println(stringBuilder.toString());
                            try (PreparedStatement preparedStatement = connection.prepareStatement(stringBuilder.toString())) {

                                preparedStatement.execute();
//                                connection.commit();
                            } catch (SQLException e) {
                                e.printStackTrace();
                                throw new RuntimeException("建表语句出错");
                            }
                        }


                    }

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        connection = DriverManager.getConnection(prop.getProperty("base.db.app.cdc.phoenix.url"));
                    }

                    @Override
                    public void close() throws Exception {
                        super.close();
                        connection.close();
                    }

                    //TODO 4.4. 将主流中的数据按照配置表的中信息进行分流，具体分为Kafk(事实)\hbase(维度)
                    @Override
                    public void processElement(MaxwellBean value, BroadcastProcessFunction<MaxwellBean, String, JSONObject>.ReadOnlyContext ctx, Collector<JSONObject> out) throws Exception {
                        //TODO 4.4.1. 将广播流状态变量取出
                        ReadOnlyBroadcastState<String, TableProcessBean> broadcastState = ctx.getBroadcastState(new MapStateDescriptor<String, TableProcessBean>("table-process", Types.STRING, Types.POJO(TableProcessBean.class)));
                        String tableName = value.getTable();
                        TableProcessBean tableProcessBean = broadcastState.get(tableName);
                        //TODO 4.4.2. Kafka数据从主流中输出
                        if (tableProcessBean != null && "kafka".equals(tableProcessBean.getSinkType()) && !"delete".equals(value.getType())) {
                            JSONObject jsonObject = new JSONObject();
                            if ("update".equals(value.getType())) {
                                jsonObject.put("table_name", tableProcessBean.getSinkTable()+"_UPDATE");
                            }else {
                                jsonObject.put("table_name", tableProcessBean.getSinkTable());
                            }
                            String[] fields = tableProcessBean.getSinkColumns().split(",");
                            List<String> fieldList = Arrays.asList(fields);
                            JSONObject dataJSON = JSON.parseObject(value.getData());
                            Set<Map.Entry<String, Object>> entries = dataJSON.entrySet();
                            entries.removeIf(r -> !fieldList.contains(r.getKey()));
                            jsonObject.put("data", value.getData());
                            out.collect(jsonObject);
                        }

                        //TODO 4.4.3. hbase维度数据从侧流中输出

                        if (tableProcessBean != null && "hbase".equals(tableProcessBean.getSinkType())) {
                            JSONObject jsonObject = new JSONObject();
                            JSONObject dataJSONObject = JSON.parseObject(value.getData());
                            String[] fields = tableProcessBean.getSinkColumns().split(",");
                            List<String> fieldList = Arrays.asList(fields);
                            Set<Map.Entry<String, Object>> entries = dataJSONObject.entrySet();
                            //将不满足的字段数据剔除
                            entries.removeIf(f -> !fieldList.contains(f.getKey()));

                            System.out.println(fieldList);
                            System.out.println(dataJSONObject.toJSONString());
                            jsonObject.put("data", dataJSONObject);
                            jsonObject.put("type", value.getType());
                            jsonObject.put("table_name", tableProcessBean.getSinkTable());
                            jsonObject.put("pks", tableProcessBean.getSinkPk());
                            //maxwell bootstrap 会发送bootstrap-start 和 bootstrap-complete 标志数据传输的完成和结束
                            if (dataJSONObject != null && dataJSONObject.size() > 0)
                                ctx.output(new OutputTag<JSONObject>("hbase_dim") {
                                }, jsonObject);
                        }

                    }
                });

        //TODO 5. 业务流按照表类型进行分流，事实表->Kafka DWD_表名主题，维度表->相应的hbase表中
        //TODO 5.1. 将侧流进行声明,并通过sinkfunction将维度数据写入hbase
        DataStream<JSONObject> hbaseStream = processStream.getSideOutput(new OutputTag<JSONObject>("hbase_dim") {
        });
        hbaseStream.addSink(new RichSinkFunction<JSONObject>() {
            private Connection connection;

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                connection = DriverManager.getConnection(prop.getProperty("base.db.app.cdc.phoenix.url"));

            }

            @Override
            public void invoke(JSONObject value, Context context) throws Exception {
                super.invoke(value, context);


                String upsert = getUpsert(value);
                System.out.println("sink invoke:" + value);
                System.out.println(upsert);
                try (PreparedStatement preparedStatement = connection.prepareStatement(upsert)) {
                    preparedStatement.executeUpdate();
                    connection.commit();
                } catch (SQLException e) {
                    e.printStackTrace();
                    throw new RuntimeException("执行插入报错:" + upsert + "@@");
                }
                //若维度表数据发生变化则在redis缓存中删除该值
                RedisUtil.deleteByKey(getRediskey(value));
            }

            private String getRediskey(JSONObject value){
                String table_name = (prop.getProperty("base.db.app.cdc.phoenix.schem")+":"+value.getString("table_name")).toUpperCase();
                String pks = value.getString("pks");
                JSONObject data = value.getJSONObject("data");
                StringBuilder keyStr = new StringBuilder();
                for (String pk : pks.split(",")) {
                    keyStr.append(":"+pk.toUpperCase()+":"+data.getString(pk));
                }

                return keyStr.toString();

            }

            private String getUpsert(JSONObject value) {
                StringBuilder upsertSql = new StringBuilder();
                JSONObject data = value.getJSONObject("data");
                String type = value.getString("type");
                String pks = value.getString("pks");
                //sink invoke:{"data":{"birthday":"1994-12-24","login_name":"ui7xm9","gender":"F","create_time":"2021-12-24 10:37:21","name":"平姣婉","user_level":"1","id":3},"type":"insert","table_name":"dim_user_info"}
                if (!"delete".equals(type)) {
                    upsertSql.append("upsert into " + prop.getProperty("base.db.app.cdc.phoenix.schem") + "." + value.getString("table_name") + " (\n");
                    upsertSql.append(StringUtils.join(data.keySet(), ",") + ")\n");
                    upsertSql.append("values('" + StringUtils.join(data.values(), "','") + "')\n");
                } else {
                    upsertSql.append("delete from " + prop.getProperty("base.db.app.cdc.phoenix.schem") + "." + value.getString("table_name") + " where ");
                    String[] split = pks.split(",");
                    for (int i = 0; i < split.length - 1; i++) {
                        String valueStr = data.getString(split[i]);
                        upsertSql.append(" " + split[i] + "='" + valueStr + "' and ");
                    }
                    upsertSql.append(split[split.length - 1] + "='" + data.getString(split[split.length - 1]) + "' ");
                }
                System.out.println(upsertSql.toString() + " data=" + data.toJSONString());

                return upsertSql.toString();
            }

            @Override
            public void close() throws Exception {
                super.close();
                if (connection != null) {
                    connection.close();
                }
            }
        });

        processStream.addSink(KafkaSinkUtil.getKafkaSinkBySchema(new KafkaSerializationSchema<JSONObject>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(JSONObject element, @Nullable Long timestamp) {
                ProducerRecord<byte[], byte[]> stringStringProducerRecord = new ProducerRecord<>(element.getString("table_name").toUpperCase(), element.getString("data").getBytes(StandardCharsets.UTF_8));
                return stringStringProducerRecord;
            }
        }));
        processStream.print("DIM");
        hbaseStream.print("DWD");

        env.execute();

    }

}
