/*
package cn.sunline.dreamvalue.kafka;

import cn.sunline.dreamvalue.constant.DreamValueType;
import cn.sunline.dreamvalue.constant.RedisConstant;
import cn.sunline.dreamvalue.controller.DreamValueController;
import cn.sunline.dreamvalue.dao.DreamValueDao;
import cn.sunline.dreamvalue.dto.DreamValueAddInDTO;
import cn.sunline.dreamvalue.dto.KafkaRetryRecordDTO;
import cn.sunline.dreamvalue.entity.CustDreamValFlow;
import cn.sunline.dreamvalue.service.HbaseService;
import cn.sunline.dreamvalue.service.KafkaRetryService;
import cn.sunline.dreamvalue.utils.DreamValueNoGenerateUtil;
import cn.sunline.dreamvalue.utils.RedisOpsUtil;
import cn.sunline.dreamvalue.utils.SubTableUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ZSetOperations;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;

*/
/**
 * @author 11292
 * @title: DreamValueConsumer
 * @projectName dream-value
 * @description: Kafka consumers for dream-value operations
 * @date 2020/5/8 13:53
 *//*

@Component
public class DreamValueConsumer {

    public static final Logger log = LoggerFactory.getLogger(DreamValueConsumer.class);
    // DAO for the dream-value tables (sharded; see SubTableUtil)
    @Autowired
    private DreamValueDao dreamValueDao;

    // Computes the sub-table (shard) suffix from a dream-value account number
    @Autowired
    private SubTableUtil subTableUtil;

    // Used by the scheduled job to republish parked retry messages
    @Autowired
    private KafkaTemplate<String,String> kafkaTemplate;

    // Generates flow (journal) numbers
    @Autowired
    private DreamValueNoGenerateUtil dreamValueNoGenerateUtil;

    @Autowired
    private RedisOpsUtil redisOpsUtil;

    // Routes failed messages to the retry topic with a next-attempt timestamp
    @Autowired
    private KafkaRetryService kafkaRetryService;

    // Writes flow records to HBase alongside MySQL
    @Autowired
    private HbaseService hbaseService;

    // Maximum number of retries for a failed message
    // NOTE(review): not referenced in this class as shown - presumably consumed by
    // KafkaRetryService; confirm before removing.
    @Value("${dreamValue.kafka.message.retry.count}")
    private Integer retryCount;

    // Name of the retry topic
    // NOTE(review): property prefixes mix "dreamValue" and "dreamvalue" across this class
    // (cf. the @KafkaListener annotations) - verify against the configuration files.
    @Value("${dreamValue.topic.retry}")
    private String dreamValueTopicRetry;

    // NOTE(review): raw RedisTemplate (no type parameters); relies on the configured serializers.
    @Autowired
    private RedisTemplate redisTemplate;

    // zset: member = retry key, score = next retry timestamp (millis) - gives time ordering
    private static final String RETRY_KEY_ZSET = RedisConstant.TOPIC_RETRY_REIDS_KEY_PRIFIX + "retry_key";
    // hash: retry key -> original serialized KafkaRetryRecordDTO payload
    private static final String RETRY_VALUE_MAP = RedisConstant.TOPIC_RETRY_REIDS_KEY_PRIFIX + "retry_value";

    // Batch listener for dream-value increase messages. Offsets are committed manually,
    // only after every record in the batch has either been processed or handed to the
    // retry service; payloads that fail to parse (dto == null) are intentionally dropped.
    @KafkaListener(containerFactory = "manualBatchListenerContainerFactory",topics = {"${dreamvalue.topic.add}"},groupId = "${dreamvalue.add.consumer.group.id}")
    public void addListener(List<ConsumerRecord<String,String>> records, Acknowledgment ack){
        log.info("梦想值调增消费消息数量为 {}",records.size());
        for (ConsumerRecord<String, String> item : records) {
            log.info("梦想值调增消费处理数据内容：{}", item);
            DreamValueAddInDTO dto = null;
            try {
                dto = JSONObject.parseObject(item.value(),DreamValueAddInDTO.class);
                dreaValueAdd(dto);
            }catch (Exception e){
                // Trailing throwable argument so SLF4J logs the full stack trace
                // (the original passed only e.getMessage(), losing the cause).
                log.error("梦想值调增消费发生异常，消息内容为：{},{}",item,e.getMessage(),e);
                // Malformed messages are discarded; anything parseable is retried later.
                if(null != dto){
                    kafkaRetryService.consumerLater(item);
                }
            }
        }
        if(!records.isEmpty()){
            ack.acknowledge();
        }
    }

    */
/**
     * @description: Retry-topic consumer: parks failed messages in Redis for delayed redelivery
     * @param [records, ack]
     * @return void
     * @author 11292
     * @date 2020/5/9 13:27
     *//*

    // Retry-topic listener: rather than reprocessing immediately, each message is parked
    // in Redis (hash holds the payload, zset is scored by the next retry timestamp) so the
    // scheduled job can replay it once its retry time arrives.
    @KafkaListener(containerFactory = "manualBatchListenerContainerFactory",topics = {"${dreamValue.topic.retry}"},groupId = "${dreamvalue.add.consumer.group.id}")
    public void addReTryListener(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
        log.info("梦想值调增消费重试消息数量为 {}",records.size());
        for (ConsumerRecord<String, String> item : records) {
            log.info("梦想值调增消费重试处理数据内容：{}", item);
            DreamValueAddInDTO dto = null;
            try {
                // Parse moved INSIDE the try: previously a malformed retry payload threw
                // before the try block, aborting the whole batch without acknowledging it,
                // so the poison message was redelivered forever.
                KafkaRetryRecordDTO kafkaRetryRecordDTO = JSON.parseObject(item.value(), KafkaRetryRecordDTO.class);
                dto = JSONObject.parseObject(kafkaRetryRecordDTO.getValue(),DreamValueAddInDTO.class);
                String key = RedisConstant.DREAM_VALUE_REDIS_KEY_PRIFIX+dto.getTransNo()+dto.getType() + DreamValueType.FLOW_TYPE_ADD.getCode();
                // hash keeps the payload; zset score = next retry time for time ordering
                redisTemplate.opsForHash().put(RETRY_VALUE_MAP, key, item.value());
                redisTemplate.opsForZSet().add(RETRY_KEY_ZSET, key, kafkaRetryRecordDTO.getNextTime());
            }catch (Exception e){
                // Trailing throwable argument preserves the stack trace in the log.
                log.error("梦想值调增消费重试发生异常，消息内容为：{},异常内容：{}",item,e.getMessage(),e);
            }
        }
        if(!records.isEmpty()){
            ack.acknowledge();
        }
    }

    */
/**
     * @description: Core implementation of a dream-value increase
     * @param [dto]
     * @return void
     * @author 11292
     * @date 2020/5/9 11:48
     *//*

    // Applies one dream-value increase: validates the DTO, updates value and balance in the
    // correct shard, then writes a flow record to both MySQL and HBase in one transaction.
    // NOTE(review): addListener invokes this method on `this`, so the call bypasses the
    // Spring proxy and @Transactional is most likely NOT applied (self-invocation problem);
    // confirm, e.g. by injecting the proxy or using TransactionTemplate.
    @Transactional(rollbackFor = Exception.class)
    public void dreaValueAdd(DreamValueAddInDTO dto) throws Exception {
        // Validate the message body; incomplete messages are silently dropped.
        if(null == dto || StringUtils.isEmpty(dto.getDreamValueNo()) || null == dto.getDreamValue() || dto.getDreamValue() == 0
                || StringUtils.isEmpty(dto.getDate()) || StringUtils.isEmpty(dto.getTime()) ||
                StringUtils.isEmpty(dto.getType()) || StringUtils.isEmpty(dto.getTransNo()) || StringUtils.isEmpty(dto.getTransDesc())){
            return ;// malformed message body
        }
        // Work out which sub-table (shard) this account lives in.
        Integer suffix = subTableUtil.getTableSuffix(dto.getDreamValueNo());
        // Idempotency guard: skip if this transNo/type increase was already applied.
        // NOTE(review): read-then-write, not atomic - two concurrent consumers could both
        // pass this check for the same transNo; verify a unique constraint backs it up.
        Integer result = dreamValueDao.queryDetailIsExistByTransNo(suffix,dto.getTransNo(), DreamValueType.FLOW_TYPE_ADD.getCode(),dto.getType());
        if(null != result){
            return ;// increase already applied
        }
        // Increase the dream value
        dreamValueDao.updateDreamValue(suffix,dto.getDreamValue(),dto.getType(),dto.getDreamValueNo());
        // Increase the dream-value balance
        dreamValueDao.updateDreamValueBalance(suffix,dto.getDreamValue(),dto.getDreamValueNo());
        // Generate the flow (journal) number
        String flowNo = dreamValueNoGenerateUtil.generateDreamValueFlowNo(DreamValueType.FLOW_TYPE_ADD.getCode());
        // Build the flow record
        CustDreamValFlow flow = new CustDreamValFlow();
        flow.setFlowNo(flowNo);
        flow.setSuffix(suffix);
        copyDreamValueAddProperty(dto,flow);
        // Persist the flow record to MySQL
        dreamValueDao.insertDreamValueFlow(flow);
        // Persist the flow record to HBase
        hbaseService.insertCustDreamValFlow(flow);
    }

    */
/**
     * @description: Copies DTO properties onto the flow entity
     * @param [dto, flow]
     * @return void
     * @author 11292
     * @date 2020/5/9 11:36
     *//*

    // Maps the incoming add-request DTO onto the persistent flow entity.
    // Declares `throws Exception` because parsing the yyyyMMddHHmmss timestamp may fail.
    private void copyDreamValueAddProperty(DreamValueAddInDTO dto, CustDreamValFlow flow) throws Exception {
        // Straight field-for-field copy from the DTO.
        flow.setDreamAcctNo(dto.getDreamValueNo());
        flow.setDreamAcctType(dto.getType());
        flow.setDreamVal(dto.getDreamValue());
        flow.setOperType(DreamValueType.FLOW_TYPE_ADD.getCode());
        flow.setTxSeqNo(dto.getTransNo());
        flow.setTxDesc(dto.getTransDesc());
        flow.setTxDate(dto.getDate());
        flow.setTxTime(dto.getTime());
        // Record when this flow row was created.
        flow.setInsertTime(new Date());
        // Combine the business date and time into a single timestamp.
        DateFormat timestampFormat = new SimpleDateFormat("yyyyMMddHHmmss");
        flow.setTxDateTime(timestampFormat.parse(dto.getDate() + dto.getTime()));
    }


    */
/**
     * Scheduled job: reads messages from Redis whose retry time has arrived and republishes them to their topic.
     *//*

    // Every 10 seconds, replay parked retry messages whose scheduled time (zset score)
    // has passed by republishing them to their original topic.
    @Scheduled(cron="0/10 * * * * *")
    public void retryFormRedis() {
        long currentTime = System.currentTimeMillis();
        Set<ZSetOperations.TypedTuple<Object>> typedTuples =
                redisTemplate.opsForZSet().reverseRangeByScoreWithScores(RETRY_KEY_ZSET, 0, currentTime);
        // Guard against a null/empty result (the original dereferenced it unconditionally).
        if (null == typedTuples || typedTuples.isEmpty()) {
            return;
        }
        for(ZSetOperations.TypedTuple<Object> tuple : typedTuples){
            String key = tuple.getValue().toString();
            Object rawValue = redisTemplate.opsForHash().get(RETRY_VALUE_MAP, key);
            if (null == rawValue) {
                // Payload already gone (e.g. handled elsewhere); drop the schedule entry
                // instead of hitting the NPE the original had on .toString().
                redisTemplate.opsForZSet().remove(RETRY_KEY_ZSET, key);
                continue;
            }
            try {
                KafkaRetryRecordDTO retryRecord = JSON.parseObject(rawValue.toString(), KafkaRetryRecordDTO.class);
                ProducerRecord record = retryRecord.parse();
                kafkaTemplate.send(record);
                // Remove this entry only after a successful send. The original removed the
                // whole score range up front, which silently dropped entries added between
                // the read and the removal, and had to re-add failed keys by hand.
                redisTemplate.opsForZSet().remove(RETRY_KEY_ZSET, key);
                redisTemplate.opsForHash().delete(RETRY_VALUE_MAP, key);
            }catch (Exception e){
                // Entry stays in Redis with its past-due score, so it is retried on the
                // next tick; the original swallowed the exception without any logging.
                log.error("retryFormRedis failed to resend message, key={}", key, e);
            }
        }
    }
}
*/
