package com.dahua.messagesyncsh.kafka;

import cn.hutool.core.date.DateTime;
import com.dahua.messagesyncsh.config.redis.RedisUtil;
import com.dahua.messagesyncsh.kafka.runner.CModeHandleRunner;
import com.dahua.messagesyncsh.kafka.runner.ControlHandleRunner;
import com.dahua.messagesyncsh.kafka.runner.FeatureHandleRunner;
import com.dahua.messagesyncsh.service.CollectionService;
import com.dahua.messagesyncsh.service.EquipmentService;
import com.dahua.messagesyncsh.service.ProcessorService;
import com.dahua.messagesyncsh.service.SensorDataService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.redisson.api.RedissonClient;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import javax.annotation.PreDestroy;
import javax.annotation.Resource;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static com.dahua.messagesyncsh.common.Constants.IS_INITIALIZE;

/**
 * Kafka batch consumer for the CONTROL_DATA / CMODE_DATA / FEATURE_DATA topics.
 *
 * <p>Dispatch policy (unchanged from the original design): batches of
 * {@link #MAX_SIZE} records or more are handed off to a shared background pool,
 * smaller batches are handled inline on the listener thread. In both cases the
 * batch is acknowledged immediately after dispatch.
 *
 * <p>NOTE(review): for large batches the ack happens BEFORE the async task
 * finishes, so a failed async task loses those records (at-most-once
 * delivery). This mirrors the original behavior — confirm it is intended.
 */
@Component
@Slf4j
public class Consumer {
    /** Batch-size threshold at or above which handling is moved off the listener thread. */
    private static final int MAX_SIZE = 500;

    /**
     * Shared bounded pool for large-batch handling. Replaces the original
     * unbounded {@code new Thread(...)} per batch, which could create an
     * arbitrary number of threads under sustained high throughput.
     */
    private final ExecutorService largeBatchExecutor = Executors.newFixedThreadPool(
            Math.max(2, Runtime.getRuntime().availableProcessors()),
            runnable -> {
                Thread thread = new Thread(runnable, "kafka-large-batch-handler");
                thread.setDaemon(true);
                return thread;
            });

    @Resource
    private RedisUtil redisUtil;

    @Resource
    private RedissonClient redissonClient;

    @Resource
    private EquipmentService equipmentService;

    @Resource
    private SensorDataService sensorDataService;

    @Resource
    private CollectionService collectionService;

    @Resource
    private ControlHandleRunner controlHandleRunner;

    @Resource
    private CModeHandleRunner cModeHandleRunner;

    @Resource
    private FeatureHandleRunner featureHandleRunner;

    @Resource
    private ProcessorService processorService;

    /**
     * Consumes CONTROL_DATA batches.
     *
     * <p>When {@code IS_INITIALIZE} is false the batch is neither handled nor
     * acked, so the broker will redeliver it later (original behavior kept).
     */
    @KafkaListener(topics = {"CONTROL_DATA"}, groupId = "FORTRESS_PROD")
    public void onControl(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
        if (!IS_INITIALIZE) {
            return;
        }
        dispatch(consumerRecords.size(),
                // Constructed lazily inside the async path, as in the original.
                () -> new ControlHandleRunner(processorService, consumerRecords).run(),
                () -> controlHandleRunner.handle(consumerRecords));
        ack.acknowledge();
    }

    /**
     * Consumes CMODE_DATA batches and logs the dispatch latency.
     *
     * <p>NOTE(review): for large batches the logged duration covers only the
     * hand-off to the pool, not the actual processing time — same as before.
     */
    @KafkaListener(topics = {"CMODE_DATA"}, groupId = "FORTRESS_PROD")
    public void onCMode(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
        if (!IS_INITIALIZE) {
            return;
        }
        long start = System.currentTimeMillis();
        dispatch(consumerRecords.size(),
                () -> new CModeHandleRunner(
                        redisUtil, redissonClient, equipmentService, sensorDataService,
                        processorService, collectionService, consumerRecords).run(),
                () -> cModeHandleRunner.handle(consumerRecords));
        long elapsed = System.currentTimeMillis() - start;
        log.info("【CMode】：处理{}条的结束时间{}", consumerRecords.size(), elapsed);
        ack.acknowledge();
    }

    /**
     * Consumes FEATURE_DATA batches.
     */
    @KafkaListener(topics = {"FEATURE_DATA"}, groupId = "FORTRESS_PROD")
    public void onFeature(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment ack) {
        if (!IS_INITIALIZE) {
            return;
        }
        dispatch(consumerRecords.size(),
                () -> new FeatureHandleRunner(
                        redisUtil, redissonClient, equipmentService, sensorDataService,
                        processorService, collectionService, consumerRecords).run(),
                () -> featureHandleRunner.handle(consumerRecords));
        ack.acknowledge();
    }

    /**
     * Routes one batch: at or above {@link #MAX_SIZE} the large-batch task is
     * submitted to the shared pool; otherwise the small-batch task runs inline.
     *
     * @param batchSize      number of records in the batch
     * @param largeBatchTask async handler for large batches
     * @param smallBatchTask inline handler for small batches
     */
    private void dispatch(int batchSize, Runnable largeBatchTask, Runnable smallBatchTask) {
        if (batchSize >= MAX_SIZE) {
            largeBatchExecutor.execute(largeBatchTask);
        } else {
            smallBatchTask.run();
        }
    }

    /** Stops accepting new async work when the application context shuts down. */
    @PreDestroy
    public void shutdown() {
        largeBatchExecutor.shutdown();
    }
}
