package com.alipay.sofa.demo.db.core.consumer;

import com.alipay.sofa.demo.db.common.entity.StudentInfo;
import com.alipay.sofa.demo.db.common.entity.StudentInfoDTO;
import com.alipay.sofa.demo.db.common.exception.OperateDataException;
import com.alipay.sofa.demo.db.common.service.StudentInsertService;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Kafka consumer that batch-inserts student records and logs throughput metrics.
 *
 * <p>Each message ({@link StudentInfoDTO}) may carry several {@link StudentInfo}
 * rows; a whole polled batch is flattened and inserted in a single service call.
 *
 * @author Zhang Da
 * @version 1.0
 * @since 2020/2/15
 */
@Component
public class StudentConsumer {

    private static final Logger logger = LoggerFactory.getLogger(StudentConsumer.class);

    @Resource
    StudentInsertService studentInsertService;
    @Resource
    RedisClient redisClient;

    /** Number of successfully consumed messages. */
    private static final AtomicInteger successConsumeCount = new AtomicInteger();

    /** Number of successfully inserted rows (one message can carry several rows). */
    private static final AtomicInteger insertSuccessCount = new AtomicInteger();

    // Snapshots used by the timer to compute per-second deltas. Written only from
    // the single @Scheduled thread, so plain (non-atomic) fields are sufficient.
    private static long oldSuccessConsumeCount = 0;
    private static long oldInsertSuccessCount = 0;
    /** Seconds elapsed since startup; divisor for the average-TPS figures. */
    private static int timerIndex = 1;

    private static final int CORE_THREAD_COUNT = 16;

    // NOTE(review): this pool is never used anywhere in this class. It allocates no
    // threads until the first submit, so it is harmless, but confirm whether it is
    // still needed or should be removed.
    private static final ExecutorService pool = new ThreadPoolExecutor(
            CORE_THREAD_COUNT, CORE_THREAD_COUNT * 20, 300, TimeUnit.SECONDS,
            new LinkedBlockingQueue<>(CORE_THREAD_COUNT), Executors.defaultThreadFactory(),
            new ThreadPoolExecutor.AbortPolicy());

    /**
     * Consumes a batch from the {@code insertStu} topic: flattens all student rows,
     * inserts them in one service call, then marks each message as consumed in Redis.
     *
     * @param records the polled batch; records with a {@code null} value are skipped
     */
    @KafkaListener(groupId = "stuGroup", topics = "insertStu")
    public void consumerForStudent(ConsumerRecords<String, StudentInfoDTO> records) {
        List<StudentInfo> list = new ArrayList<>();
        for (ConsumerRecord<String, StudentInfoDTO> consumerRecord : records) {
            if (consumerRecord != null && consumerRecord.value() != null) {
                list.addAll(consumerRecord.value().getStudentInfoList());
            }
        }

        try {
            studentInsertService.insertStudents(list);
            successConsumeCount.addAndGet(records.count());
            insertSuccessCount.addAndGet(list.size());
        } catch (OperateDataException e) {
            // Pass the exception itself so the stack trace is preserved;
            // logging only e.getMessage() discards the cause.
            logger.error("Failed to insert {} students", list.size(), e);
        }

        // NOTE(review): messages are marked consumeSuccess=true even when the insert
        // above failed — confirm this best-effort acknowledgement is intentional.
        for (ConsumerRecord<String, StudentInfoDTO> consumerRecord : records) {
            // BUGFIX: the original dereferenced value() unconditionally here, although
            // the collection loop above already shows it can be null (NPE risk).
            StudentInfoDTO dto = consumerRecord == null ? null : consumerRecord.value();
            if (dto == null) {
                continue;
            }
            dto.setConsumeSuccess(true);
            dto.setStudentInfoList(null);
            redisClient.setByProtostuff(dto.getMessageId(), dto, StudentInfoDTO.class);
        }
    }

    /**
     * Runs every second: logs the per-second consume/insert deltas, the cumulative
     * totals, and the average TPS since startup.
     *
     * <p>(The method name keeps its original typo — renaming a public method could
     * break external configuration that references it by name.)
     */
    @Scheduled(fixedDelay = 1000)
    public void excuteTimer() {
        long globalConsumeTotal = successConsumeCount.get();
        long oneSecondConsumeTotal = globalConsumeTotal - oldSuccessConsumeCount;
        oldSuccessConsumeCount = globalConsumeTotal;

        long globalInsertTotal = insertSuccessCount.get();
        long oneSecondInsertTotal = globalInsertTotal - oldInsertSuccessCount;
        oldInsertSuccessCount = globalInsertTotal;

        // Parameterized logging avoids string concatenation when the level is disabled.
        logger.info("消费来自kafka的数据，1秒消费消息成功数: {}", oneSecondConsumeTotal);
        logger.info("消费来自kafka的数据，1秒插入数据成功数: {}", oneSecondInsertTotal);
        logger.info("消费消息成功数total: {}", globalConsumeTotal);
        logger.info("插入数据成功数total: {}", globalInsertTotal);
        logger.info("消费消息平均每秒tps: {}", globalConsumeTotal / timerIndex);
        logger.info("插入数据平均每秒tps: {}", globalInsertTotal / timerIndex);
        timerIndex++;
    }
}
