package com.ot.flink.cdc.function.sink;

import com.alibaba.fastjson.JSON;
import com.ot.flink.cdc.config.CdcTargetConfig;
import com.ot.flink.cdc.dto.TableLogDto;
import com.ot.flink.cdc.util.SpringUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.MessageFormat;
import java.util.Properties;

/**
 * @Description: kafka
 * @author: admin
 * @date: 2024/4/19
 */
public class KafkaSink extends RichSinkFunction<TableLogDto> {

    /** 日志 */
    private static Logger LOGGER = LoggerFactory.getLogger(KafkaSink.class);

    /** kafa client */
    private transient KafkaProducer<String, String> kafkaProducer;

    private String topic;

    @Override
    public void open(Configuration parameters) throws Exception {
        CdcTargetConfig targetConfig = SpringUtil.getBean(CdcTargetConfig.class);
        this.topic = targetConfig.getKafkaTopic();
        Properties prop = new Properties();
        prop.put("bootstrap.servers", targetConfig.getKafkaHosts());
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("acks", "all");
        prop.put("retries", 0);
        prop.put("batch.size", 16384);
        prop.put("linger.ms", 10);
        prop.put("buffer.memory", 33554432);
        if (StringUtils.isNotBlank(targetConfig.getKafkaProtocol())) {
            prop.setProperty("security.protocol", targetConfig.getKafkaProtocol());
            prop.setProperty("sasl.mechanism", targetConfig.getKafkaMechanism());
            prop.setProperty("sasl.jaas.config", targetConfig.getKafkaConfig());
        }
//        //
        kafkaProducer = new KafkaProducer<>(prop);
    }

    @Override
    public void close() throws Exception {
        if (kafkaProducer != null) {
            kafkaProducer.close();
        }
    }

    @Override
    public void invoke(TableLogDto value, Context context) throws Exception {
        String finalTopic = this.topic;
        if (StringUtils.isBlank(finalTopic)) {
            finalTopic = MessageFormat.format("{0}.{1}", value.getDatabase(), value.getTable());
        }
        String kafkaMsg = JSON.toJSONString(value);
        LOGGER.info("{} - {}", value.getTable(), kafkaMsg);
        kafkaProducer.send(new ProducerRecord<>(finalTopic, value.getTable(), JSON.toJSONString(value)));
    }
}
