package cn.com.bluemoon.bd.flink.sink.kafka;

import cn.com.bluemoon.bd.flink.common.Constants;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.util.Preconditions;

/**
 * json string数据的kafka分区函数
 * 功能：根据指定的字段组合，将相同值的数据发送到同一个kafka分区
 * @author 王仁线
 * @version 1.0
 */
public class JsonStringKafkaPartitioner extends FlinkKafkaPartitioner<String> {
    /** JSON field names whose concatenated values form the partition key. */
    private final String[] keyFields;

    /**
     * Creates a partitioner that routes records with identical values for the
     * given JSON fields to the same Kafka partition.
     *
     * @param iKeyFields names of the JSON fields used to build the partition key
     * @throws IllegalArgumentException if {@code iKeyFields} is null or empty
     */
    public JsonStringKafkaPartitioner(String... iKeyFields) {
        Preconditions.checkArgument(null != iKeyFields, "keys cannot be null");
        Preconditions.checkArgument(iKeyFields.length > 0, "keys cannot be empty.");
        // Defensive copy: the caller could otherwise mutate the varargs array
        // after construction and silently change routing.
        this.keyFields = iKeyFields.clone();
    }

    /**
     * Selects a partition by hashing the delimiter-joined values of the
     * configured key fields extracted from the JSON record.
     *
     * @param record     the JSON string being produced
     * @param key        serialized key (unused)
     * @param value      serialized value (unused)
     * @param topic      target topic (unused)
     * @param partitions available partition ids of the target topic
     * @return one of the ids from {@code partitions}
     * @throws IllegalArgumentException if {@code partitions} is null or empty
     */
    @Override
    public int partition(String record, byte[] key, byte[] value, String topic, int[] partitions) {
        Preconditions.checkArgument(partitions != null && partitions.length > 0, "Partitions of the target topic is empty.");
        JSONObject jsonObj = JSON.parseObject(record);
        StringBuilder strBuf = new StringBuilder();

        for (String field : keyFields) {
            strBuf.append(jsonObj.getString(field)).append(Constants.DELIMITER);
        }

        // Math.floorMod always yields a value in [0, partitions.length).
        // The previous Math.abs(hashCode) approach fails when hashCode is
        // Integer.MIN_VALUE: Math.abs returns a negative number, producing a
        // negative index and an ArrayIndexOutOfBoundsException.
        String rowKey = strBuf.toString();
        return partitions[Math.floorMod(rowKey.hashCode(), partitions.length)];
    }

}
