package com.we.flink.cdcpipeline;

import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.we.flink.utils.WeCdcPropertyReader;
import com.we.flink.utils.WeKafkaPartitioner;
import com.we.flink.utils.WeKafkaTopicSelector;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.StringJoiner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.connect.json.JsonConverterConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Flink job: captures MySQL change events (Flink CDC / Debezium), enriches each row
 * with its originating database and table name, and writes the result to Kafka.
 *
 * <p>Configuration is read from {@link #RELEASEPROP} via {@code WeCdcPropertyReader}.
 */
public class Hhl01RiskTag {
    public static final String RELEASEPROP = "cdcpipeline/Hhl01RiskTag.properties";
    // final: the logger is never reassigned; kept public for backward compatibility.
    public static final Logger LOG = LoggerFactory.getLogger(Hhl01RiskTag.class);

    /**
     * Entry point: builds the MySQL CDC source, the enrichment map, and the Kafka
     * sink, then executes the streaming job. Any startup/runtime failure is logged
     * with its full stack trace.
     */
    public static void main(String[] args) {
        try {
            // Emit DECIMAL columns as plain numeric strings instead of Debezium's
            // default base64-encoded struct representation.
            HashMap<String, Object> customConfig = new HashMap<>();
            customConfig.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");

            WeCdcPropertyReader prop = WeCdcPropertyReader.init(RELEASEPROP);
            final String kfkTopicPrefix = prop.getKfkTopicPrefix();
            final String keySeriaSchemaClm = prop.getKeySeriaSchemaClm();
            String database = prop.getDatabase();
            String tableName = prop.getTableName();
            // Sharded tables (table_0 .. table_{n-1}) are captured as one comma list.
            String tablelist = buildTableList(database, tableName, prop.getSplitNum());

            MySqlSource<String> mysqlSource =
                    MySqlSource.<String>builder()
                            .hostname(prop.getHost())
                            .port(prop.getPort())
                            .scanNewlyAddedTableEnabled(true)
                            .databaseList(database)
                            .tableList(tablelist)
                            .username(prop.getUserName())
                            .password(prop.getPassword())
                            .deserializer(
                                    new JsonDebeziumDeserializationSchema(false, customConfig))
                            .build();

            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            DataStreamSource<String> cdcInput =
                    env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "Hhl01RiskTag");

            SingleOutputStreamOperator<String> out =
                    cdcInput.map(
                            new RichMapFunction<String, String>() {
                                @Override
                                public String map(String value) {
                                    JSONObject jsonObject = JSONObject.parseObject(value);
                                    JSONObject source = jsonObject.getJSONObject("source");
                                    // Debezium emits "after": null for DELETE events; fall
                                    // back to "before" so deletes do not fail the job with
                                    // an NPE. NOTE(review): confirm downstream consumers
                                    // expect the pre-image row for deletes.
                                    JSONObject row = jsonObject.getJSONObject("after");
                                    if (row == null) {
                                        row = jsonObject.getJSONObject("before");
                                    }
                                    if (row == null) {
                                        throw new IllegalStateException(
                                                "CDC record has neither 'after' nor 'before': "
                                                        + value);
                                    }
                                    row.put("db_name", source.getString("db"));
                                    row.put("table_name", source.getString("table"));
                                    return row.toString();
                                }
                            });

            KafkaRecordSerializationSchema<String> serializedRecord =
                    KafkaRecordSerializationSchema.builder()
                            .setTopicSelector(
                                    new WeKafkaTopicSelector(true, tableName, true, kfkTopicPrefix))
                            .setKeySerializationSchema(
                                    new SerializationSchema<String>() {
                                        @Override
                                        public byte[] serialize(String element) {
                                            JSONObject jsonObject = JSONObject.parseObject(element);
                                            String key = jsonObject.getString(keySeriaSchemaClm);
                                            // Explicit charset: bare getBytes() depends on the
                                            // platform default. A record missing the key column
                                            // gets an empty key instead of throwing an NPE.
                                            return key == null
                                                    ? new byte[0]
                                                    : key.getBytes(StandardCharsets.UTF_8);
                                        }
                                    })
                            .setValueSerializationSchema(new SimpleStringSchema())
                            .setPartitioner(new WeKafkaPartitioner())
                            .build();

            KafkaSink<String> kfkSink =
                    KafkaSink.<String>builder()
                            .setBootstrapServers(prop.getKfkSinkAdd())
                            .setRecordSerializer(serializedRecord)
                            .build();

            out.sinkTo(kfkSink);

            // Job name kept byte-identical to the original ("class com.we...Hhl01RiskTag")
            // so external monitoring that matches on it keeps working.
            env.execute(Hhl01RiskTag.class.toString());
        } catch (Exception e) {
            // Log the full stack trace: e.getMessage() alone can be null and
            // printStackTrace() bypasses the logging framework.
            LOG.error("Hhl01RiskTag pipeline failed", e);
        }
    }

    /**
     * Builds the CDC table list: {@code "db.table"} for an unsplit table, or
     * {@code "db.table_0,db.table_1,..."} when the table is sharded.
     *
     * @param database  source database name
     * @param tableName logical table name (shard suffix appended when split)
     * @param splitNum  number of shards; values {@code <= 1} mean unsplit
     * @return comma-separated fully-qualified table list for the CDC source
     */
    private static String buildTableList(String database, String tableName, int splitNum) {
        if (splitNum <= 1) {
            return database + "." + tableName;
        }
        StringJoiner tables = new StringJoiner(",");
        for (int i = 0; i < splitNum; i++) {
            tables.add(database + "." + tableName + "_" + i);
        }
        return tables.toString();
    }
}
