package com.millstein.realtime.app.dim;

import com.alibaba.fastjson.JSONObject;
import com.millstein.realtime.app.base.BaseAppV1;
import com.millstein.realtime.bean.TableProcess;
import com.millstein.realtime.common.Constants;
import com.millstein.realtime.util.FlinkSinkUtil;
import com.millstein.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.List;

@Slf4j
@Slf4j
public class DimApp extends BaseAppV1 {

    public static void main(String[] args) throws Exception {
        // Checkpoint interval 2000 ms, parallelism 3, job/group id "DimApp",
        // consuming the business change-log topic.
        new DimApp().init(2000, 3, "DimApp", Constants.TOPIC_DB);
    }

    /**
     * Pipeline: clean the change-log stream, broadcast the dimension-table
     * configuration read via CDC, create the Phoenix target tables, join data
     * with its config, project the configured columns, and sink to Phoenix.
     *
     * @param env          Flink execution environment
     * @param streamSource raw change-log records from Kafka
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, DataStreamSource<String> streamSource) {
        // 1. Clean the raw records.
        SingleOutputStreamOperator<JSONObject> dataStream = doETL(streamSource);

        // 2. Read the dimension-table configuration as a stream (MySQL CDC).
        SingleOutputStreamOperator<TableProcess> configStream = readTableProcessFromCDC(env);

        // 3. Create the target table in Phoenix for every config record.
        configStream = createTableInPhoenix(configStream);

        // 4. Connect the cleaned data stream with the broadcast config stream.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectStream = connectDataAndConfigStream(
                dataStream, configStream
        );

        // 5. Keep only the columns listed in sink_columns for each record.
        connectStream = filterBySinkColumns(connectStream);

        // 6. Write each dimension record to its configured Phoenix table.
        writeToPhoenix(connectStream);
    }

    /**
     * Cleans the incoming records. A record is kept only when:
     * <p>1. it is not null</p>
     * <p>2. it is valid JSON</p>
     * <p>3. its database is the business database (gmall)</p>
     * <p>4. its type is insert, update or bootstrap-insert</p>
     * <p>5. its data field is present and non-empty (an empty object serializes to "{}", length 2)</p>
     *
     * @param streamSource raw records to clean
     * @return stream of parsed, validated JSON objects
     */
    private SingleOutputStreamOperator<JSONObject> doETL(DataStreamSource<String> streamSource) {
        // NOTE(review): each surviving record is parsed twice (filter + map); a flatMap
        // would parse once, but this shape is kept for readability.
        return streamSource.filter(json -> {
            // 1. must not be null
            if (json == null) {
                return false;
            }
            // 2. must be valid JSON; anything else lands in the catch block
            JSONObject jsonObject;
            try {
                jsonObject = JSONObject.parseObject(json);
            } catch (Exception e) {
                log.error("收到的数据为{}，不是json数据", json);
                return false;
            }
            // 3. must come from the business database
            if (!Constants.BUSINESS_DATABASE_NAME.equals(jsonObject.getString("database"))) {
                return false;
            }
            // 4. type must be insert / update / bootstrap-insert
            String type = jsonObject.getString("type");
            if (!"insert".equals(type)
                    && !"update".equals(type)
                    && !"bootstrap-insert".equals(type)) {
                return false;
            }
            // 5. data must be present and non-empty ("{}" has length 2).
            // FIX: guard against a missing "data" field, which previously threw an NPE.
            String data = jsonObject.getString("data");
            return data != null && data.length() > 2;
        }).map(JSONObject::parseObject);
    }


    /**
     * Reads the dimension-table configuration (gmall_config.table_process) as a
     * stream via Flink CDC: an initial snapshot followed by binlog changes.
     *
     * @param env Flink execution environment
     * @return stream of {@link TableProcess} config records
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcessFromCDC(StreamExecutionEnvironment env) {
        // 1. Build the MySQL CDC source.
        // NOTE(review): credentials are hard-coded; move host/user/password into
        // external configuration or a secret store.
        MySqlSource<String> readSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("123456")
                .startupOptions(StartupOptions.initial())
                .databaseList("gmall_config")
                .tableList("gmall_config.table_process")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        // 2. Extract the "after" image (row state after the change) as TableProcess.
        // FIX: delete events carry no "after" image (it is null); drop them here
        // instead of emitting null records that NPE downstream in createTableInPhoenix.
        return env
                .fromSource(readSource, WatermarkStrategy.noWatermarks(), "table process source")
                .map(JSONObject::parseObject)
                .filter(jsonObject -> jsonObject.get("after") != null)
                .map(jsonObject -> jsonObject.getObject("after", TableProcess.class));
    }

    /**
     * Creates the target table in Phoenix for every configuration record and
     * passes the record through unchanged.
     *
     * @param configStream configuration stream to process
     * @return the same configuration stream, after the DDL has been executed
     */
    private SingleOutputStreamOperator<TableProcess> createTableInPhoenix(
            SingleOutputStreamOperator<TableProcess> configStream
    ) {
        return configStream.map(new RichMapFunction<TableProcess, TableProcess>() {
            private Connection connection;

            @Override
            public void open(Configuration parameters) throws Exception {
                // One Phoenix connection per task, reused for every DDL statement.
                connection = JdbcUtil.getPhoenixConnection();
            }

            @Override
            public TableProcess map(TableProcess value) throws Exception {
                // 1. Build the DDL. Phoenix DDL cannot be parameterized; the identifiers
                // come from the trusted gmall_config.table_process table, not user input.
                StringBuilder sql = new StringBuilder();
                sql
                        .append("create table if not exists ")
                        .append(value.getSinkTable())
                        .append("(")
                        // declare every configured column as varchar
                        .append(value.getSinkColumns().replaceAll("[^,]+", "$0 varchar"))
                        .append(", constraint pk primary key(")
                        // default primary key is "id" when none is configured
                        .append(value.getSinkPk() == null ? "id" : value.getSinkPk())
                        .append("))")
                        .append(value.getSinkExtend() == null ? "" : value.getSinkExtend());
                log.info("phoenix的建表语句为：{}", sql.toString());

                // 2. FIX: try-with-resources guarantees the statement is closed even
                // when execute() throws (the original leaked it on failure).
                try (PreparedStatement preparedStatement = connection.prepareStatement(sql.toString())) {
                    preparedStatement.execute();
                }
                // 3. Return value so the config record keeps flowing downstream.
                return value;
            }

            @Override
            public void close() throws Exception {
                JdbcUtil.closeConnection(connection);
            }
        });
    }

    /**
     * Connects the data stream with the (broadcast) configuration stream so each
     * data record is paired with its table's configuration.
     *
     * @param dataStream   cleaned change-log stream
     * @param configStream dimension-table configuration stream
     * @return stream of (data, config) pairs; records without a config are dropped
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectDataAndConfigStream(
            SingleOutputStreamOperator<JSONObject> dataStream,
            SingleOutputStreamOperator<TableProcess> configStream
    ) {
        // 1. Turn the config stream into a broadcast stream.
        // 1.1. Broadcast state descriptor: source table name -> its config.
        MapStateDescriptor<String, TableProcess> descriptor = new MapStateDescriptor<>(
                "tableProcessState", String.class, TableProcess.class
        );
        // 1.2. Broadcast the config stream to every parallel data task.
        BroadcastStream<TableProcess> broadcastStream = configStream.broadcast(descriptor);

        // 2. Connect the data stream with the broadcast stream.
        return dataStream
                .connect(broadcastStream)
                .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
                    /**
                     * Handles a data record: looks up its table's config in broadcast state.
                     * NOTE(review): data arriving before its config has been broadcast
                     * (e.g. during job startup) is silently dropped — confirm this is acceptable.
                     *
                     * @param value data record to process
                     * @param ctx   read-only context
                     * @param out   collector for (data, config) pairs
                     * @throws Exception on state access failure
                     */
                    @Override
                    public void processElement(
                            JSONObject value, ReadOnlyContext ctx, Collector<Tuple2<JSONObject, TableProcess>> out
                    ) throws Exception {
                        // Read the config for this record's source table from broadcast state.
                        ReadOnlyBroadcastState<String, TableProcess> broadcastState = ctx.getBroadcastState(descriptor);
                        TableProcess process = broadcastState.get(value.getString("table"));
                        if (process == null) {
                            // Not a configured dimension table (or config not yet received): drop.
                            return;
                        }
                        out.collect(Tuple2.of(value, process));
                    }

                    /**
                     * Handles a config record: stores it in broadcast state keyed by source table.
                     *
                     * @param value config record to store
                     * @param ctx   broadcast context
                     * @param out   collector (unused)
                     * @throws Exception on state access failure
                     */
                    @Override
                    public void processBroadcastElement(
                            TableProcess value, Context ctx, Collector<Tuple2<JSONObject, TableProcess>> out
                    ) throws Exception {
                        // Write/overwrite the config for this source table.
                        BroadcastState<String, TableProcess> broadcastState = ctx.getBroadcastState(descriptor);
                        broadcastState.put(value.getSourceTable(), value);
                    }
                });
    }


    /**
     * Removes from each record's data object every column not listed in the
     * configuration's sink_columns, so only the configured columns reach Phoenix.
     *
     * @param connectStream stream of (data, config) pairs to filter
     * @return the same stream with the data objects projected in place
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> filterBySinkColumns(
            SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectStream
    ) {
        return connectStream
                .map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
                    @Override
                    public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> value) throws Exception {
                        JSONObject data = value.f0.getJSONObject("data");
                        TableProcess config = value.f1;
                        List<String> containColumns = Arrays.asList(config.getSinkColumns().split(","));
                        // Drop every key that is not a configured sink column.
                        data.keySet().removeIf(key -> !containColumns.contains(key));
                        return value;
                    }
                });
    }

    /**
     * Sinks each (data, config) pair to its configured Phoenix table.
     *
     * @param connectStream stream to sink
     */
    private void writeToPhoenix(
            SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectStream
    ) {
        connectStream.addSink(FlinkSinkUtil.getPhoenixSink());
    }
}
