package com.warren.financial.lease.realtime.app.dim;

import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.warren.financial.lease.realtime.app.func.DimSinkFunc;
import com.warren.financial.lease.realtime.bean.TableProcess;
import com.warren.financial.lease.realtime.common.FinancialLeaseCommon;
import com.warren.financial.lease.realtime.util.CreateEnvUtil;
import com.warren.financial.lease.realtime.util.HBaseUtil;
import com.warren.financial.lease.realtime.util.KafkaUtil;
import com.warren.financial.lease.realtime.util.MysqlUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.HashMap;

/**
 * Dimension-layer Flink job: maintains HBase dimension tables from Kafka ODS
 * change records, driven by a MySQL configuration table read via Flink CDC.
 *
 * @author warren
 * @date 2023/12/11
 **/
public class FinancialLeaseDimApp {

    public static void main(String[] args) throws Exception {
        // Create the stream execution environment (web UI on port 8082).
        StreamExecutionEnvironment env = CreateEnvUtil.getExecutionEnvironment(8082, "financial_lease_dim_app");

        // Main stream: raw Maxwell change-log JSON from the Kafka ODS topic.
        String topicName = FinancialLeaseCommon.KAFKA_ODS_TOPIC;
        String appName = "financial_lease_dim_app";
        KafkaSource<String> kafkaConsumer = KafkaUtil.getKafkaConsumer(topicName, appName, OffsetsInitializer.earliest());
        DataStreamSource<String> kafkaSource = env.fromSource(kafkaConsumer, WatermarkStrategy.noWatermarks(), appName);

        // Config stream: dimension-table metadata captured from MySQL via Flink CDC
        // (Debezium-style JSON with "before"/"after"/"op" fields).
        MySqlSource<String> mysqlSource = CreateEnvUtil.getMysqlSource();
        DataStreamSource<String> flinkCDCSource = env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), appName);

        // Keep the HBase dimension tables in sync with the configuration table,
        // and forward each config change so it can be broadcast downstream.
        SingleOutputStreamOperator<TableProcess> processStream = flinkCDCSource.process(new ProcessFunction<String, TableProcess>() {
            Connection connection = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                // One HBase connection per task, reused for all DDL operations.
                connection = HBaseUtil.getHBaseConnection();
            }

            @Override
            public void processElement(String jsonStr, ProcessFunction<String, TableProcess>.Context context, Collector<TableProcess> collector) throws Exception {
                // Apply the config-table change to HBase (create/drop tables).
                JSONObject jsonObject = JSONObject.parseObject(jsonStr);
                TableProcess beforeTableProcess = jsonObject.getObject("before", TableProcess.class);
                TableProcess afterTableProcess = jsonObject.getObject("after", TableProcess.class);
                String op = jsonObject.getString("op");

                if ("r".equals(op) || "c".equals(op)) {
                    // Snapshot read or insert: create the table.
                    createTable(afterTableProcess);
                } else if ("d".equals(op)) {
                    deleteTable(beforeTableProcess);
                } else {
                    // Update: drop the old table, then recreate it.
                    deleteTable(beforeTableProcess);
                    createTable(afterTableProcess);
                }

                // BUG FIX: for a delete op the "after" image is null, so the original
                // tableProcess.setOperateType(op) threw an NPE. Forward the "before"
                // image instead so the broadcast side can remove the stale entry.
                TableProcess tableProcess = "d".equals(op) ? beforeTableProcess : afterTableProcess;
                if (tableProcess == null) {
                    return;
                }
                tableProcess.setOperateType(op);
                collector.collect(tableProcess);
            }

            @Override
            public void close() throws Exception {
                // Release the HBase connection.
                HBaseUtil.closeHBaseConnection(connection);
            }

            // Creates the configured HBase table; IO failures are logged, not fatal.
            public void createTable(TableProcess tableProcess) {
                try {
                    HBaseUtil.createTable(connection, FinancialLeaseCommon.HBASE_NAMESPACE, tableProcess.getSinkTable(), tableProcess.getSinkFamily());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }

            // Drops the configured HBase table; IO failures are logged, not fatal.
            public void deleteTable(TableProcess tableProcess) {
                try {
                    HBaseUtil.deleteTable(connection, FinancialLeaseCommon.HBASE_NAMESPACE, tableProcess.getSinkTable());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });


        // Broadcast the config stream and connect it with the main stream.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<String, TableProcess>("broadcast_state", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastStream = processStream.broadcast(mapStateDescriptor);
        BroadcastConnectedStream<String, TableProcess> connectedStream = kafkaSource.connect(broadcastStream);

        // Filter the merged stream down to dimension-table rows.
        // Output element: <operation type, row data, table metadata>.
        SingleOutputStreamOperator<Tuple3<String, JSONObject, TableProcess>> dimStream = connectedStream.process(new BroadcastProcessFunction<String, TableProcess, Tuple3<String, JSONObject, TableProcess>>() {

            // Config pre-loaded straight from MySQL, keyed by source table name.
            // Covers main-stream records that arrive before the broadcast state
            // has been populated (the config stream can lag behind).
            private HashMap<String, TableProcess> configMap = new HashMap<>();

            @Override
            public void open(Configuration parameters) throws Exception {
                // Bootstrap the config mapping from MySQL. try-with-resources
                // closes ResultSet/statement/connection even on failure (the
                // original never closed the ResultSet and leaked everything
                // if a read threw).
                try (java.sql.Connection connection = MysqlUtil.getConnection();
                     PreparedStatement ps = connection.prepareStatement("select * from financial_lease_config.table_process");
                     ResultSet resultSet = ps.executeQuery()) {
                    while (resultSet.next()) {
                        TableProcess tableProcess = new TableProcess();
                        tableProcess.setSourceTable(resultSet.getString(1));
                        tableProcess.setSinkTable(resultSet.getString(2));
                        tableProcess.setSinkFamily(resultSet.getString(3));
                        tableProcess.setSinkColumns(resultSet.getString(4));
                        tableProcess.setSinkRowKey(resultSet.getString(5));
                        configMap.put(tableProcess.getSourceTable(), tableProcess);
                    }
                }
            }

            @Override
            public void processElement(String value, BroadcastProcessFunction<String, TableProcess, Tuple3<String, JSONObject, TableProcess>>.ReadOnlyContext context, Collector<Tuple3<String, JSONObject, TableProcess>> collector) throws Exception {
                // Forward only rows that belong to a configured dimension table.
                ReadOnlyBroadcastState<String, TableProcess> broadcastState = context.getBroadcastState(mapStateDescriptor);
                JSONObject jsonObject = JSONObject.parseObject(value);
                String tableName = jsonObject.getString("table");
                String type = jsonObject.getString("type");

                if ("bootstrap-start".equals(type) || "bootstrap-complete".equals(type)) {
                    // Maxwell bootstrap markers carry no row data.
                    return;
                }

                // Broadcast state first, pre-loaded map as fallback.
                TableProcess tableProcess = broadcastState.get(tableName);
                if (tableProcess == null) {
                    tableProcess = configMap.get(tableName);
                }
                if (tableProcess == null) {
                    // Not a dimension table: drop the record.
                    return;
                }

                JSONObject data = jsonObject.getJSONObject("data");
                if ("delete".equals(type)) {
                    // BUG FIX: the original compared data.getString("type"), which is
                    // never "delete" (the envelope's type is not inside "data"), so
                    // this branch was dead. NOTE(review): Maxwell delete events carry
                    // the removed row in "data" and "old" only appears on updates —
                    // fall back to "data" when "old" is absent; confirm against the
                    // actual Maxwell configuration.
                    JSONObject old = jsonObject.getJSONObject("old");
                    if (old != null) {
                        data = old;
                    }
                } else {
                    // Keep only the configured sink columns.
                    String[] columns = tableProcess.getSinkColumns().split(",");
                    data.keySet().removeIf(key -> !Arrays.asList(columns).contains(key));
                }

                if (data == null) {
                    return;
                }
                collector.collect(Tuple3.of(type, data, tableProcess));
            }

            @Override
            public void processBroadcastElement(TableProcess tableProcess, BroadcastProcessFunction<String, TableProcess, Tuple3<String, JSONObject, TableProcess>>.Context context, Collector<Tuple3<String, JSONObject, TableProcess>> collector) throws Exception {
                // Mirror the config change into broadcast state (and the fallback map).
                BroadcastState<String, TableProcess> broadcastState = context.getBroadcastState(mapStateDescriptor);
                String op = tableProcess.getOperateType();
                if ("d".equals(op)) {
                    // Remove the entry from both the state and the fallback map.
                    if (broadcastState.contains(tableProcess.getSourceTable())) {
                        broadcastState.remove(tableProcess.getSourceTable());
                    }
                    configMap.remove(tableProcess.getSourceTable());
                } else {
                    // BUG FIX: processElement looks the state up by SOURCE table
                    // name, but the original keyed it by getSinkTable(), so lookups
                    // always missed. Key by source table, and keep the fallback
                    // map consistent as well.
                    broadcastState.put(tableProcess.getSourceTable(), tableProcess);
                    configMap.put(tableProcess.getSourceTable(), tableProcess);
                }
            }
        });

        dimStream.print("dim>>>>>>>>");

        // Write the dimension rows out to HBase.
        dimStream.addSink(new DimSinkFunc());

        // Launch the job.
        env.execute();

    }
}
