package cc.laop.mq.kafka.producer;

import cc.laop.mq.kafka.KafkaConstants;
import com.alibaba.fastjson.JSON;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.util.concurrent.ListenableFuture;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Kafka producer service: serializes a payload to JSON, sends it to a topic
 * (optionally to an explicitly computed partition), and reports the outcome
 * as a {@code code}/{@code message} map built from {@link KafkaConstants}.
 *
 * Created by Pengpeng on 2017/5/26.
 */
@Component
public class ProducerServer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends {@code value} (JSON-serialized via fastjson) to {@code topic}.
     *
     * @param topic        target Kafka topic
     * @param value        payload object; serialized with {@link JSON#toJSONString(Object)}
     * @param ifPartition  when {@code true}, route to a partition chosen by
     *                     {@link #getPartitionIndex(String, int)}; otherwise let Kafka
     *                     partition by key
     * @param partitionNum total number of partitions (only used when {@code ifPartition})
     * @param role         caller role, combined with the payload hash to form the record key
     * @return a map with "code" and "message" entries describing the send outcome
     */
    public Map<String, Object> send(String topic, Object value, boolean ifPartition, Integer partitionNum,
            String role) {
        // Record key combines the caller's role and the payload hash; it is also
        // the input to hash-based partition routing below.
        String key = role + "-" + value.hashCode();
        String valueString = JSON.toJSONString(value);
        ListenableFuture<SendResult<String, String>> result;
        if (ifPartition) {
            result = kafkaTemplate.send(topic, getPartitionIndex(key, partitionNum), key, valueString);
        } else {
            result = kafkaTemplate.send(topic, key, valueString);
        }
        return checkProRecord(result);
    }

    /**
     * Misspelled legacy entry point, kept so existing callers keep compiling.
     *
     * @deprecated use {@link #send(String, Object, boolean, Integer, String)} instead.
     */
    @Deprecated
    public Map<String, Object> sned(String topic, Object value, boolean ifPartition, Integer partitionNum, String
            role) {
        return send(topic, value, ifPartition, partitionNum, role);
    }

    /**
     * Resolves the target partition for a key: hash-based for a non-empty key,
     * uniformly random otherwise.
     *
     * @param key          record key (may be null/empty)
     * @param partitionNum number of partitions; must be positive
     * @return a partition index in {@code [0, partitionNum)}
     */
    private int getPartitionIndex(String key, int partitionNum) {
        if (StringUtils.isEmpty(key)) {
            // No key to hash — pick a random partition. ThreadLocalRandom avoids
            // allocating a fresh Random instance on every call.
            return ThreadLocalRandom.current().nextInt(partitionNum);
        }
        // hashCode() % partitionNum lies strictly inside (-partitionNum, partitionNum),
        // so Math.abs is safe here (no Integer.MIN_VALUE hazard).
        return Math.abs(key.hashCode() % partitionNum);
    }

    /**
     * Blocks on the send future and maps its outcome to a code/message result map.
     *
     * @param res future returned by {@link KafkaTemplate#send}; may be null
     * @return a map with "code" and "message" entries from {@link KafkaConstants}
     */
    private Map<String, Object> checkProRecord(ListenableFuture<SendResult<String, String>> res) {
        Map<String, Object> m = new HashMap<>();
        if (res == null) {
            m.put("code", KafkaConstants.KAFKA_NO_RESULT_CODE);
            m.put("message", KafkaConstants.KAFKA_NO_RESULT_MES);
            return m;
        }
        try {
            // Block until the broker acknowledges (or the send fails).
            SendResult<String, String> r = res.get();
            // Only the record metadata's offset is inspected; a non-negative offset
            // means the broker accepted the record. offset() returns a primitive
            // long, so no null check is possible (the original boxed null check was
            // always true).
            long offsetIndex = r.getRecordMetadata().offset();
            if (offsetIndex >= 0) {
                m.put("code", KafkaConstants.SUCCESS_CODE);
                m.put("message", KafkaConstants.SUCCESS_MES);
            } else {
                m.put("code", KafkaConstants.KAFKA_NO_OFFSET_CODE);
                m.put("message", KafkaConstants.KAFKA_NO_OFFSET_MES);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so upstream code can observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
            m.put("code", KafkaConstants.KAFKA_SEND_ERROR_CODE);
            m.put("message", KafkaConstants.KAFKA_SEND_ERROR_MES);
        } catch (Exception e) {
            e.printStackTrace();
            m.put("code", KafkaConstants.KAFKA_SEND_ERROR_CODE);
            m.put("message", KafkaConstants.KAFKA_SEND_ERROR_MES);
        }
        return m;
    }


}
