package com.sui.bigdata.flink.sql.sink.kafka;

import com.sun.istack.Nullable;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.streaming.connectors.kafka.KafkaContextAware;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Objects;

/**
 * Adapts a value-only {@link SerializationSchema} to Kafka's
 * {@link KafkaSerializationSchema}, writing every record to a single fixed
 * topic with a {@code null} key, and optionally routing records to a
 * partition via a {@link FlinkKafkaPartitioner}.
 *
 * <p>The incoming {@code timestamp} is intentionally ignored: records are
 * produced without an explicit timestamp, so the broker assigns one.
 *
 * <p>NOTE(review): {@code @Nullable} here resolves to
 * {@code com.sun.istack.Nullable}, a JAXB-internal class; prefer
 * {@code org.apache.flink.annotation.Nullable}.
 *
 * <p>NOTE(review): {@code serializationSchema} is never opened by this
 * wrapper — if schemas that require {@code open()} are used, override
 * {@code KafkaSerializationSchema#open} (available in Flink 1.11+) — TODO
 * confirm the Flink version in use.
 *
 * @author YongChen
 * @date 2020/6/17 14:10
 * @description
 * @email yong_chen@sui.com
 */
public class KafkaSerializationSchemaWrapper<T> implements KafkaSerializationSchema<T>, KafkaContextAware<T> {

    /** Optional partitioner; when {@code null}, Kafka chooses the partition. */
    private final FlinkKafkaPartitioner<T> partitioner;
    /** Serializes the element into the record value. */
    private final SerializationSchema<T> serializationSchema;
    /** The single topic every record is written to. */
    private final String topic;

    /** Partitions of {@link #topic}, injected by Flink via {@link #setPartitions(int[])}. */
    private int[] partitions;
    /** Subtask index, injected by Flink before the first {@link #serialize} call. */
    private int parallelInstanceId;
    /** Total number of parallel subtasks, injected by Flink. */
    private int numParallelInstances;
    /** Guards the one-time lazy {@code partitioner.open(...)} call. */
    private boolean partitionerOpened;

    /**
     * @param topic               target Kafka topic; must not be {@code null}
     * @param partitioner         partitioner to pick the target partition, or
     *                            {@code null} to let Kafka decide
     * @param serializationSchema serializer for the record value; must not be
     *                            {@code null}
     * @throws NullPointerException if {@code topic} or
     *                              {@code serializationSchema} is {@code null}
     */
    public KafkaSerializationSchemaWrapper(
            String topic,
            @Nullable FlinkKafkaPartitioner<T> partitioner,
            SerializationSchema<T> serializationSchema) {
        // Fail fast on construction instead of NPE-ing inside serialize().
        this.topic = Objects.requireNonNull(topic, "topic");
        this.partitioner = partitioner;
        this.serializationSchema = Objects.requireNonNull(serializationSchema, "serializationSchema");
    }

    /**
     * Serializes {@code element} into a keyless {@link ProducerRecord} for
     * {@link #topic}. The {@code timestamp} argument is ignored (see class
     * javadoc).
     */
    @Override
    public ProducerRecord<byte[], byte[]> serialize(
            T element,
            @Nullable Long timestamp) {
        byte[] serialized = serializationSchema.serialize(element);
        final Integer partition;
        if (partitioner != null) {
            // Bug fix: the partitioner was previously used without ever being
            // opened, so partitioners that depend on the subtask index (e.g.
            // FlinkFixedPartitioner) were left unconfigured. Open lazily once,
            // after Flink has injected parallelInstanceId/numParallelInstances.
            if (!partitionerOpened) {
                partitioner.open(parallelInstanceId, numParallelInstances);
                partitionerOpened = true;
            }
            // Key is null by design, hence the null second argument.
            partition = partitioner.partition(element, null, serialized, topic, partitions);
        } else {
            partition = null;
        }

        return new ProducerRecord<>(topic, partition, null, serialized);
    }

    /** Always returns the fixed topic; this wrapper does not route per element. */
    @Override
    public String getTargetTopic(T element) {
        return topic;
    }

    @Override
    public void setPartitions(int[] partitions) {
        this.partitions = partitions;
    }

    @Override
    public void setParallelInstanceId(int parallelInstanceId) {
        this.parallelInstanceId = parallelInstanceId;
    }

    @Override
    public void setNumParallelInstances(int numParallelInstances) {
        this.numParallelInstances = numParallelInstances;
    }
}
