package cn.gwm.flink.streaming.sink.kafka;

import cn.hutool.json.JSONConfig;
import cn.hutool.json.JSONUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * @Author: liangguang
 * @Date: 2022/4/12
 * @Description:
 */
@Slf4j
public class KafkaSinkProducer {

    /**
     * Shared Hutool JSON serialization config. Configured in the static block
     * below to keep null-valued fields in the serialized output.
     */
    public static final JSONConfig jsonConfig = JSONConfig.create();

    static {
        // Emit null fields explicitly so downstream consumers always see the full schema.
        jsonConfig.setIgnoreNullValue(false);
    }

    /** Utility holder of static factory methods — not meant to be instantiated. */
    private KafkaSinkProducer() {
    }

    /**
     * Builds a producer with the default JSON serialization schema, no record key,
     * and the given delivery semantic.
     *
     * @param topic          target Kafka topic
     * @param producerConfig Kafka producer properties
     * @param semantic       delivery guarantee (e.g. {@code EXACTLY_ONCE})
     * @return configured {@link FlinkKafkaProducer}
     */
    public static FlinkKafkaProducer producerBuilder(String topic, Properties producerConfig, FlinkKafkaProducer.Semantic semantic) {
        return producerBuilder(topic, null, producerConfig, semantic, StringUtils.EMPTY);
    }

    /**
     * Builds an EXACTLY_ONCE producer with the default JSON serialization schema,
     * keying each record by the value of the given JSON field.
     *
     * @param topic          target Kafka topic
     * @param producerConfig Kafka producer properties
     * @param key            JSON field name whose value becomes the record key
     * @return configured {@link FlinkKafkaProducer}
     */
    public static FlinkKafkaProducer producerBuilder(String topic, Properties producerConfig, String key) {
        return producerBuilder(topic, null, producerConfig, FlinkKafkaProducer.Semantic.EXACTLY_ONCE, key);
    }

    /**
     * Builds an EXACTLY_ONCE producer with the default JSON serialization schema
     * and no record key.
     *
     * @param topic          target Kafka topic
     * @param producerConfig Kafka producer properties
     * @return configured {@link FlinkKafkaProducer}
     */
    public static FlinkKafkaProducer producerBuilder(String topic, Properties producerConfig) {
        return producerBuilder(topic, producerConfig, StringUtils.EMPTY);
    }

    /**
     * Builds a producer; when {@code serializationSchema} is null a default schema is
     * used that serializes each element to a UTF-8 JSON byte array (nulls kept, per
     * {@link #jsonConfig}). If {@code key} is non-blank, the value of that JSON field
     * is used as the record key.
     *
     * <p>NOTE(review): signatures intentionally keep the raw {@code FlinkKafkaProducer}/
     * {@code KafkaSerializationSchema} types for source compatibility with existing
     * callers; new call sites should prefer parameterized types.</p>
     *
     * @param topic               target Kafka topic
     * @param serializationSchema custom schema, or null to use the default JSON schema
     * @param producerConfig      Kafka producer properties
     * @param semantic            delivery guarantee
     * @param key                 JSON field name for the record key; blank/empty means unkeyed
     * @return configured {@link FlinkKafkaProducer}
     */
    public static FlinkKafkaProducer producerBuilder(String topic, KafkaSerializationSchema serializationSchema,
                                                     Properties producerConfig, FlinkKafkaProducer.Semantic semantic, String key) {
        if (serializationSchema == null) {
            serializationSchema = (KafkaSerializationSchema<Object>) (data, timestamp) -> {
                byte[] value = JSONUtil.toJsonStr(data, jsonConfig).getBytes(StandardCharsets.UTF_8);
                if (StringUtils.isNotBlank(key)) {
                    String keyValue = JSONUtil.parseObj(data).getStr(key);
                    if (keyValue == null) {
                        // Fail fast with context instead of an opaque NPE from getBytes().
                        throw new IllegalStateException(
                                "Record key field '" + key + "' is missing or null for topic " + topic);
                    }
                    return new ProducerRecord<>(topic, keyValue.getBytes(StandardCharsets.UTF_8), value);
                }
                return new ProducerRecord<>(topic, value);
            };
        }
        FlinkKafkaProducer myProducer = new FlinkKafkaProducer(topic, serializationSchema, producerConfig, semantic);
        // BUGFIX: was log.error — constructing a producer is routine info, not an error condition.
        log.info("kafka生产者构造，topic={}", topic);
        return myProducer;
    }

}
