package cn.com.bluemoon.bd.flink.sink;

import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;

import javax.annotation.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;
import java.util.Properties;

/**
 * Factory for Flink Kafka sinks ({@link FlinkKafkaProducer}) that share a common
 * producer configuration loaded from a {@code kafka.properties} classpath resource.
 *
 * <p>The resource must define {@code bootstrap.servers}; all other producer settings
 * (acks, retries, batching, buffering) are fixed defaults applied in the constructor.
 *
 * <p>Every created sink has {@code writeTimestampToKafka} enabled, so the Flink record
 * timestamp is written into the Kafka record.
 */
public class KafkaSinkCreater {
    private final Properties producerConfig = new Properties();

    /**
     * Loads {@code kafka.properties} from the context classloader and builds the shared
     * producer configuration.
     *
     * @throws IllegalStateException if the resource is missing or {@code bootstrap.servers}
     *                               is absent/blank
     * @throws RuntimeException      if the resource exists but cannot be read
     */
    public KafkaSinkCreater() {
        // try-with-resources: the classpath stream must be closed even on failure.
        try (InputStream inputStream = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream("kafka.properties")) {
            // getResourceAsStream returns null (no exception) when the resource is absent;
            // fail fast with a clear message instead of an NPE inside ParameterTool.
            if (inputStream == null) {
                throw new IllegalStateException("缺少属性文件kafka.properties");
            }

            ParameterTool parameterTool = ParameterTool.fromPropertiesFile(inputStream);
            String servers = parameterTool.get("bootstrap.servers");
            if (StringUtils.isBlank(servers)) {
                // IllegalStateException (unchecked) is not swallowed by the IOException
                // catch below, so a blank broker list is no longer misreported as a
                // missing properties file.
                throw new IllegalStateException("bootstrap.servers不能为空");
            }

            this.producerConfig.setProperty("bootstrap.servers", servers);
            // Properties is a String-to-String table: use setProperty with string values
            // so getProperty/stringPropertyNames see every entry. Kafka's ProducerConfig
            // parses the numeric settings from their string form.
            this.producerConfig.setProperty("acks", "all");
            this.producerConfig.setProperty("retries", "3");
            this.producerConfig.setProperty("batch.size", "16384");
            this.producerConfig.setProperty("linger.ms", "0");
            this.producerConfig.setProperty("buffer.memory", "33554432");
        } catch (IOException e) {
            // Only actual read failures are wrapped; configuration errors above propagate
            // with their own accurate messages.
            throw new RuntimeException("缺少属性文件kafka.properties", e);
        }
    }

    /**
     * Creates a Kafka sink for {@code topic} using the given serialization schema and
     * Kafka's default partitioning.
     *
     * @param topic  target Kafka topic
     * @param schema serializer for the element type
     * @param <T>    element type of the stream
     * @return a sink writing to {@code topic} with Flink timestamps attached
     */
    public <T> SinkFunction<T> create(String topic, SerializationSchema<T> schema) {
        FlinkKafkaProducer<T> sink = new FlinkKafkaProducer<>(topic, schema, this.producerConfig);
        sink.setWriteTimestampToKafka(true);
        return sink;
    }

    /**
     * Creates a Kafka sink for {@code topic} with an optional custom partitioner.
     *
     * @param topic       target Kafka topic
     * @param schema      serializer for the element type
     * @param partitioner custom partitioner, or {@code null} for Kafka's default
     * @param <T>         element type of the stream
     * @return a sink writing to {@code topic} with Flink timestamps attached
     */
    public <T> SinkFunction<T> create(String topic, SerializationSchema<T> schema, @Nullable FlinkKafkaPartitioner<T> partitioner) {
        // Diamond operator (was a raw type): keeps the call type-safe and warning-free,
        // consistent with the overload above.
        FlinkKafkaProducer<T> sink = new FlinkKafkaProducer<>(topic, schema, this.producerConfig, Optional.ofNullable(partitioner));
        sink.setWriteTimestampToKafka(true);
        return sink;
    }

    /**
     * Convenience overload: plain-string sink with default partitioning.
     *
     * @param topic target Kafka topic
     * @return a sink serializing elements via {@link SimpleStringSchema}
     */
    public SinkFunction<String> create(String topic) {
        return this.create(topic, new SimpleStringSchema());
    }

    /**
     * Convenience overload: plain-string sink with an optional custom partitioner.
     *
     * @param topic       target Kafka topic
     * @param partitioner custom partitioner, or {@code null} for Kafka's default
     * @return a sink serializing elements via {@link SimpleStringSchema}
     */
    public SinkFunction<String> create(String topic, @Nullable FlinkKafkaPartitioner<String> partitioner) {
        return this.create(topic, new SimpleStringSchema(), partitioner);
    }

}
