package com.we.flink.cdcpipeline;

import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.we.flink.utils.WeCdcPropertyReader;
import com.we.flink.utils.WeKafkaPartitioner;
import com.we.flink.utils.WeKafkaTopicSelector;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.connect.json.JsonConverterConfig;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.stream.Collectors;

public class CdcPipeline {

    /**
     * Builds and runs a Flink job that captures MySQL binlog change events (via the
     * Ververica MySQL CDC connector), flattens each Debezium change event into one JSON
     * record per row image (before/after), and writes the records to Kafka.
     *
     * <p>Row images that do not contain a non-null value for the configured key column
     * ({@code keySeriaSchemaClm}) are silently dropped; the key column's value becomes
     * the Kafka record key.
     *
     * @param propFile     property file name/path consumed by {@link WeCdcPropertyReader#init}
     * @param pipelineName Flink job name passed to {@code env.execute}
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void CaptureMysqlSource(String propFile, String pipelineName) throws Exception {

        // Have Debezium's JSON converter render DECIMAL columns as plain numbers
        // instead of Base64-encoded unscaled bytes.
        HashMap<String, Object> customConfig = new HashMap<>();
        customConfig.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");

        WeCdcPropertyReader prop = WeCdcPropertyReader.init(propFile);
        final String kfkTopicPrefix = prop.getKfkTopicPrefix();
        // Column whose value becomes the Kafka record key; rows lacking it are dropped.
        final String keySeriaSchemaClm = prop.getKeySeriaSchemaClm();
        String database = prop.getDatabase();
        String tableName = prop.getTableName();

        // Qualify every (comma-separated) table name with its database:
        // "t1,t2" -> "db.t1,db.t2". A single name without commas is handled
        // the same way, since split(",") then yields one element.
        String tablelist =
                Arrays.stream(tableName.split(","))
                        .map(table -> database + "." + table)
                        .collect(Collectors.joining(","));

        MySqlSource<String> mysqlSource =
                MySqlSource.<String>builder()
                        .hostname(prop.getHost())
                        .port(prop.getPort())
                        .scanNewlyAddedTableEnabled(true)
                        .databaseList(database)
                        .tableList(tablelist)
                        .username(prop.getUserName())
                        .password(prop.getPassword())
                        // NOTE(review): hard-coded server-id range; two pipelines reusing
                        // this range against the same MySQL instance will conflict --
                        // consider moving it into the property file.
                        .serverId("7400-7406")
                        .deserializer(new JsonDebeziumDeserializationSchema(false, customConfig))
                        .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // NOTE(review): source name is hard-coded; presumably environment-specific.
        DataStreamSource<String> cdcInput =
                env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "Hhl10ProdSource");

        /**
         * Sample Debezium change event consumed by the flatMap below:
         * { "before":null, "after":{ "id":"Dg==", "uid":200085,
         * "request_id":"ab8414837d08415a85b674a01089b077", "audit_time":1649850429000,
         * "audit_status":3, "create_time":1649850286000, "update_time":1649850429000, "deleted":0
         * }, "source":{ "version":"1.5.4.Final", "connector":"mysql", "name":"mysql_binlog_source",
         * "ts_ms":0, "snapshot":"false", "db":"haohuan_db_dev", "sequence":null,
         * "table":"user_amount_retrial_audit", "server_id":0, "gtid":null, "file":"", "pos":0,
         * "row":0, "thread":null, "query":null }, "op":"r", "ts_ms":1651754344539,
         * "transaction":null }
         */
        SingleOutputStreamOperator<String> out =
                cdcInput.flatMap(
                        new RichFlatMapFunction<String, String>() {
                            @Override
                            public void flatMap(String value, Collector<String> collector)
                                    throws Exception {
                                JSONObject input = JSONObject.parseObject(value);
                                JSONObject source = input.getJSONObject("source");
                                String db = source.getString("db");
                                String table = source.getString("table");

                                // Emit up to two records per change event: the before-image
                                // (flag=false) and the after-image (flag=true).
                                emitRowImage(
                                        input.getJSONObject("before"),
                                        false,
                                        db,
                                        table,
                                        keySeriaSchemaClm,
                                        collector);
                                emitRowImage(
                                        input.getJSONObject("after"),
                                        true,
                                        db,
                                        table,
                                        keySeriaSchemaClm,
                                        collector);
                            }
                        });

        KafkaRecordSerializationSchema<String> serializedRecord =
                KafkaRecordSerializationSchema.builder()
                        .setTopicSelector(
                                new WeKafkaTopicSelector(false, null, true, kfkTopicPrefix))
                        .setKeySerializationSchema(
                                new SerializationSchema<String>() {
                                    @Override
                                    public byte[] serialize(String element) {
                                        // The flatMap guarantees the key column is present and
                                        // non-null for every emitted record.
                                        JSONObject input = JSONObject.parseObject(element);
                                        String key = input.getString(keySeriaSchemaClm);
                                        // Explicit charset: getBytes() without one uses the
                                        // platform default, which varies across JVMs (< 18).
                                        return key.getBytes(StandardCharsets.UTF_8);
                                    }
                                })
                        .setValueSerializationSchema(
                                new SerializationSchema<String>() {
                                    @Override
                                    public byte[] serialize(String element) {
                                        return element.getBytes(StandardCharsets.UTF_8);
                                    }
                                })
                        .setPartitioner(new WeKafkaPartitioner())
                        .build();

        KafkaSink<String> kfkSink =
                KafkaSink.<String>builder()
                        .setBootstrapServers(prop.getKfkSinkAdd())
                        .setRecordSerializer(serializedRecord)
                        .build();

        out.sinkTo(kfkSink);

        env.execute(pipelineName);
    }

    /**
     * Emits one flattened record for a single row image (before/after) of a change event.
     * The image is dropped when it is null or lacks a non-null value for {@code keyColumn}.
     * The input JSON is cloned so the original event is never mutated.
     *
     * @param image     the "before" or "after" object of a Debezium event; may be null
     * @param flag      false for the before-image, true for the after-image
     * @param database  source database name, attached as "db_name"
     * @param table     source table name, attached as "table_name"
     * @param keyColumn column that must be present and non-null for the record to be emitted
     * @param out       downstream collector receiving the serialized JSON record
     */
    private static void emitRowImage(
            JSONObject image,
            boolean flag,
            String database,
            String table,
            String keyColumn,
            Collector<String> out) {
        if (image == null) {
            return;
        }
        JSONObject record = (JSONObject) image.clone();
        if (record.containsKey(keyColumn) && record.getString(keyColumn) != null) {
            record.put("flag", flag);
            record.put("db_name", database);
            record.put("table_name", table);
            out.collect(record.toString());
        }
    }
}
