// NOTE(review): this entire file is comment-toggled (dead code). Either
// restore the class deliberately or delete the file — keeping a fully
// commented-out implementation in the tree invites drift.
/*
package cn.sunline.dreamvalue.service.impl;

import cn.sunline.dreamvalue.dao.KafkaFailMsgDao;
import cn.sunline.dreamvalue.dto.KafkaRetryRecordDTO;
import cn.sunline.dreamvalue.entity.KafkaFailMsg;
import cn.sunline.dreamvalue.service.KafkaRetryService;
import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.nio.ByteBuffer;
import java.util.Calendar;
import java.util.Date;

*/
/**
 * @author 11292
 * @title: KafkaRetryServiceImpl
 * @projectName dream-value
 * @description: routes failed Kafka records to a delayed-retry topic, persisting exhausted/unsendable records to MySQL
 * @date 2020/5/10 12:36
 *//*

@Service
public class KafkaRetryServiceImpl implements KafkaRetryService {

    private static final Logger log = LoggerFactory.getLogger(KafkaRetryServiceImpl.class);

    */
/**
     * 消息消费失败后下一次消费的延迟时间(秒)
     * 第一次重试延迟15秒;第二次延迟30秒,第三次延迟1分钟...
     *//*

    private static final int[] RETRY_INTERVAL_SECONDS = {15, 30, 1*60, 2*60, 5*60, 10*60, 30*60, 1*60*60, 2*60*60};

    */
/**
     * 重试topic
     *//*

    @Value("${dreamValue.topic.retry}")
    private String retryTopic;
    @Autowired
    private KafkaTemplate<String, String> template;
    @Autowired
    private KafkaFailMsgDao kafkaFailMsgDao;

    public void consumerLater(ConsumerRecord<String, String> record){
        // 获取消息的已重试次数
        int retryTimes = getRetryTimes(record);
        Date nextConsumerTime = getNextConsumerTime(retryTimes);
        if(nextConsumerTime == null) {
            //超过重试次数的记录到mysql里边，后续人工处理
            KafkaFailMsg msg = new KafkaFailMsg();
            msg.setCtime(new Date());
            msg.setMsgKey(record.key());
            msg.setOffsetNum(record.offset());
            msg.setPartitionNum(record.partition());
            msg.setTopic(record.topic());
            msg.setMsgValue(record.value());
            kafkaFailMsgDao.insertSelective(msg);
            return;
        }

        KafkaRetryRecordDTO retryRecord = new KafkaRetryRecordDTO();
        retryRecord.setNextTime(nextConsumerTime.getTime());
        retryRecord.setTopic(record.topic());
        retryRecord.setRetryTimes(retryTimes);
        retryRecord.setKey(record.key());
        retryRecord.setValue(record.value());

        String value = JSON.toJSONString(retryRecord);
        try {
            template.send(retryTopic, null, value);
        }catch (Exception e){
            //发送失败的也需要记录到mysql中去
            KafkaFailMsg msg = new KafkaFailMsg();
            msg.setCtime(new Date());
            msg.setMsgKey(record.key());
            msg.setOffsetNum(record.offset());
            msg.setPartitionNum(record.partition());
            msg.setTopic(record.topic());
            msg.setMsgValue(record.value());
            kafkaFailMsgDao.insertSelective(msg);
        }
    }

    */
/**
     * 获取消息的已重试次数
     *//*

    private int getRetryTimes(ConsumerRecord record){
        int retryTimes = 1;
        for(Header header : record.headers()){
            if(KafkaRetryRecordDTO.KEY_RETRY_TIMES.equals(header.key())){
                ByteBuffer buffer = ByteBuffer.wrap(header.value());
                retryTimes = buffer.getInt();
            }
        }
        retryTimes++;
        return retryTimes;
    }

    */
/**
     * 获取待重试消息的下一次消费时间
     *//*

    private Date getNextConsumerTime(int retryTimes){
        // 重试次数超过上限,不再重试
        if(RETRY_INTERVAL_SECONDS.length <= retryTimes) {
            return null;
        }

        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.SECOND, RETRY_INTERVAL_SECONDS[retryTimes]);
        return calendar.getTime();
    }
}
*/
