package com.we.flink.cdcpipeline.test;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.sink.TopicSelector;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.util.Collector;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.we.flink.utils.WeCdcPropertyReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Properties;

public class CDCPipeline {
    /**
     * Logger MUST be a static field, not a local variable: the anonymous
     * {@link FlatMapFunction} below is serialized by Flink at job submission,
     * and a captured local SLF4J Logger is not {@link java.io.Serializable},
     * which makes closure cleaning fail. Static fields are not captured.
     */
    private static final Logger LOG = LoggerFactory.getLogger(CDCPipeline.class);

    /** Checkpoint every 10 minutes. */
    private static final long CHECKPOINT_INTERVAL_MS = 10 * 60 * 1000L;
    /** Abort a checkpoint that has not completed within 1 hour. */
    private static final long CHECKPOINT_TIMEOUT_MS = 60 * 60 * 1000L;

    /**
     * Builds and runs a MySQL-CDC-to-Kafka streaming pipeline.
     *
     * <p>Reads binlog change events via the Ververica MySQL CDC source, keeps only
     * records that carry an {@code after} image with a {@code uid}, normalizes a few
     * sharded/renamed table names, and sinks each record to a Kafka topic derived
     * from the table name ({@code prefix_table_suffix}), keyed and partitioned by
     * the configured key column.
     *
     * <p>This method blocks in {@code env.execute} until the job terminates.
     * Any failure is logged; the method itself never throws.
     *
     * @param propFile path/name of the properties file read by {@link WeCdcPropertyReader}
     * @param pipeName Flink job name, also used as the source operator name
     */
    public static void CaptureMysqlSource(String propFile, String pipeName) {
        WeCdcPropertyReader prop = WeCdcPropertyReader.init(propFile);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            env.setStateBackend(new RocksDBStateBackend(prop.getRocksDBBackendUrl()));
            configureCheckpointing(env.getCheckpointConfig());

            /** Kafka sink settings. */
            String kfkBootStrapServer = prop.getKfkSinkAdd();
            String topicPrefix = prop.getKfkTopicPrefix();
            String topicSuffix = prop.getKfkTopicSuffix();
            String keySeriaSchemaClm = prop.getKeySeriaSchemaClm();

            MySqlSource<String> sqlSource = buildMySqlSource(prop);

            /** Flink Stream Source: extract/normalize the "after" image of each change event. */
            SingleOutputStreamOperator<String> dataSource =
                    env.fromSource(sqlSource, WatermarkStrategy.noWatermarks(), pipeName)
                            .flatMap(
                                    // Anonymous class (not a lambda): Flink needs the concrete
                                    // generic signature, and the instance must be serializable.
                                    new FlatMapFunction<String, String>() {
                                        @Override
                                        public void flatMap(String record, Collector<String> out)
                                                throws Exception {
                                            if (record.isEmpty()) {
                                                return;
                                            }
                                            JSONObject parseObject = JSONObject.parseObject(record);
                                            JSONObject srcJson = parseObject.getJSONObject("source");
                                            if (srcJson == null) {
                                                // Guard: a record without "source" metadata
                                                // would otherwise NPE below.
                                                LOG.warn("Record without source metadata: {}", record);
                                                return;
                                            }
                                            String dbName = srcJson.getString("db");
                                            String tableName = srcJson.getString("table");

                                            JSONObject afterJson = parseObject.getJSONObject("after");
                                            if (afterJson == null) {
                                                // DELETE events (or malformed records) carry no
                                                // "after" image; they are intentionally dropped.
                                                LOG.warn("Error record: {}", record);
                                                return;
                                            }
                                            Long uid = afterJson.getLong("uid");
                                            if (tableName == null || uid == null || dbName == null) {
                                                return;
                                            }
                                            // Normalize renamed / sharded tables to their
                                            // canonical logical name.
                                            if (dbName.contains("haohuan_db")
                                                    && "template_log".equals(tableName)) {
                                                tableName = "user_loan";
                                            }
                                            if (dbName.contains("haohuan_db")
                                                    && tableName.matches("risk_tag_part[0-9]+")) {
                                                tableName = "risk_tag";
                                            }
                                            // Carry the table name downstream so the sink can
                                            // route to the right topic; it is stripped again in
                                            // the value serializer.
                                            afterJson.put("table", tableName);

                                            out.collect(
                                                    afterJson.toString(
                                                            SerializerFeature.WriteMapNullValue));
                                        }
                                    });

            KafkaSink<String> kafkaSink =
                    buildKafkaSink(kfkBootStrapServer, topicPrefix, topicSuffix, keySeriaSchemaClm);

            dataSource.sinkTo(kafkaSink);
            env.execute(pipeName);
        } catch (Exception e) {
            // Log with full stack trace instead of printStackTrace(): pipeline
            // failures must reach the configured log backend, not raw stderr.
            LOG.error("CDC pipeline '{}' failed", pipeName, e);
        }
    }

    /** Applies the standard exactly-once checkpointing policy to the job. */
    private static void configureCheckpointing(CheckpointConfig ckConf) {
        ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        ckConf.setCheckpointInterval(CHECKPOINT_INTERVAL_MS);
        ckConf.setCheckpointTimeout(CHECKPOINT_TIMEOUT_MS);
        ckConf.setMaxConcurrentCheckpoints(1);
        ckConf.setMinPauseBetweenCheckpoints(500);
        // Keep externalized checkpoints so the job can be restored after a cancel.
        ckConf.enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        ckConf.enableUnalignedCheckpoints();
    }

    /**
     * Builds the MySQL CDC source from the pipeline properties.
     *
     * <p>Starts with a full initial snapshot, emits Debezium-JSON strings, and
     * applies custom type converters for time/float columns.
     */
    private static MySqlSource<String> buildMySqlSource(WeCdcPropertyReader prop) {
        Properties dbzProperties = new Properties();
        dbzProperties.setProperty("bigint.unsigned.handling.mode", "long");
        dbzProperties.setProperty("decimal.handling.mode", "double");
        dbzProperties.put("converters", "timeAndFloat");
        dbzProperties.put(
                "timeAndFloat.type", "com.we.flink.cdcpipeline.test.MySQLTimeCustomConverter");

        return MySqlSource.<String>builder()
                .hostname(prop.getHost())
                .port(prop.getPort())
                .scanNewlyAddedTableEnabled(true) // allow adding new tables at runtime
                .databaseList(prop.getDatabase())
                .tableList(prop.getTableName())
                .username(prop.getUserName())
                .password(prop.getPassword())
                .startupOptions(StartupOptions.initial())
                .connectTimeout(Duration.ofSeconds(60))
                .serverId(prop.getServerId())
                .serverTimeZone("Asia/Shanghai")
                .debeziumProperties(dbzProperties)
                .deserializer(new JsonDebeziumDeserializationSchema(false))
                .build();
    }

    /**
     * Builds the Kafka sink.
     *
     * <p>Topic is {@code prefix_<table>_suffix}; the message key is the value of
     * {@code keyColumn}; the value is the record minus the routing-only
     * {@code "table"} field; partitioning hashes the key. All byte/String
     * conversions use UTF-8 explicitly so behavior does not depend on the
     * platform default charset.
     */
    private static KafkaSink<String> buildKafkaSink(
            String bootstrapServers, String topicPrefix, String topicSuffix, String keyColumn) {
        return KafkaSink.<String>builder()
                .setBootstrapServers(bootstrapServers)
                .setRecordSerializer(
                        KafkaRecordSerializationSchema.builder()
                                .setTopicSelector(
                                        new TopicSelector<String>() {
                                            @Override
                                            public String apply(String record) {
                                                JSONObject jsonObject =
                                                        JSONObject.parseObject(record);
                                                return topicPrefix
                                                        + "_"
                                                        + jsonObject.getString("table")
                                                        + "_"
                                                        + topicSuffix;
                                            }
                                        })
                                .setKeySerializationSchema(
                                        new SerializationSchema<String>() {
                                            @Override
                                            public byte[] serialize(String record) {
                                                JSONObject jsonObject =
                                                        JSONObject.parseObject(record);
                                                long uid = jsonObject.getLongValue(keyColumn);
                                                return String.valueOf(uid)
                                                        .getBytes(StandardCharsets.UTF_8);
                                            }
                                        })
                                .setValueSerializationSchema(
                                        new SerializationSchema<String>() {
                                            @Override
                                            public byte[] serialize(String record) {
                                                JSONObject jsonObject =
                                                        JSONObject.parseObject(record);
                                                // "table" was only a routing hint added
                                                // upstream; strip it from the payload.
                                                jsonObject.remove("table");
                                                return jsonObject
                                                        .toString(
                                                                SerializerFeature
                                                                        .WriteMapNullValue)
                                                        .getBytes(StandardCharsets.UTF_8);
                                            }
                                        })
                                .setPartitioner(
                                        new FlinkKafkaPartitioner<String>() {
                                            @Override
                                            public int partition(
                                                    String record,
                                                    byte[] key,
                                                    byte[] value,
                                                    String topic,
                                                    int[] partitions) {
                                                // |h % n| < n, so Math.abs here is safe even
                                                // for Integer.MIN_VALUE hash codes.
                                                return Math.abs(
                                                        new String(key, StandardCharsets.UTF_8)
                                                                        .hashCode()
                                                                % partitions.length);
                                            }
                                        })
                                .build())
                .build();
    }
}
