package com.cupdata.sms.kafka;

import com.cupdata.sms.config.ConfigProperties;
import com.cupdata.sms.core.CusKafkaProperties;
import com.cupdata.sms.core.FixedSizeSet;
import com.cupdata.sms.core.SyncOperatorType;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.SmartInitializingSingleton;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * @author: zda
 * @description: TODO
 * @date: 2024/5/6 14:19
 */
@Slf4j
@Data
@EnableConfigurationProperties(ConfigProperties.class)
@ConditionalOnProperty(prefix = "channel-core",value = "enableDefaultKafka",matchIfMissing = true,havingValue = "true")
@Component
public class KafkaDealMsgTemplate implements ApplicationContextAware, SmartInitializingSingleton, DisposableBean {
    private static ApplicationContext applicationContext;
    private static ExecutorService executorService = null;
    @Autowired
    private RedisTemplate<String,String> redisTemplate;
    @Resource
    private KafkaConsumerService kafkaConsumerService;
    // Shared stop flag for every consumer loop: once false, all loops exit.
    private static AtomicBoolean isRunning = new AtomicBoolean(true);
    // Pending redis-set sync operations (type 0 = add key to set, otherwise remove).
    public static BlockingQueue<SyncOperatorType> syncQueue = new LinkedBlockingQueue<>();

    /**
     * Spring shutdown callback: stop all consumer loops and drain the worker pool.
     *
     * <p>Fix: the original registered a JVM shutdown hook here instead of shutting
     * down directly — but Spring invokes {@code destroy()} during context shutdown,
     * so the work must happen inline. It also NPE'd when no {@link MessageDealProcessor}
     * beans were found (executorService never created) and never cleared
     * {@code isRunning}, leaving the poll loops spinning.
     */
    @Override
    public void destroy() throws Exception {
        // Signal every ConsumerBiz loop to stop polling on its next iteration.
        isRunning.set(false);
        if (executorService != null) {
            executorService.shutdown();
            try {
                if (!executorService.awaitTermination(10, TimeUnit.SECONDS)) {
                    executorService.shutdownNow();
                }
            } catch (InterruptedException ex) {
                executorService.shutdownNow();
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * After all singletons exist, start one dedicated consumer thread per
     * {@link MessageDealProcessor} bean found in the context.
     */
    @Override
    public void afterSingletonsInstantiated() {
        Map<String, MessageDealProcessor> dealProcesserMap = applicationContext.getBeansOfType(MessageDealProcessor.class);
        if (!dealProcesserMap.isEmpty()) {
            checkRedisConfig(dealProcesserMap);
            // One thread per processor: each ConsumerBiz owns a blocking poll loop.
            executorService = Executors.newFixedThreadPool(dealProcesserMap.size());
            for (MessageDealProcessor dealProcessor : dealProcesserMap.values()) {
                executorService.submit(new ConsumerBiz(dealProcessor));
            }
        }
    }

    /**
     * Fail fast at startup when idempotence is enabled but redis is unreachable,
     * since the idempotence key set is persisted in redis.
     *
     * @param dealProcesserMap all discovered processors, keyed by bean name
     * @throws RuntimeException when any processor needs idempotence and the
     *         probe round-trip to redis fails (cause chained)
     */
    private void checkRedisConfig(Map<String, MessageDealProcessor> dealProcesserMap) {
        boolean needIdempotence = dealProcesserMap.values().stream()
                .anyMatch(x -> x.getKafkaProperties().getNeedIdempotence());
        if (needIdempotence) {
            try {
                // Cheap connectivity probe; the key is not expected to exist.
                redisTemplate.execute((RedisCallback<Object>) connection -> connection.get("some-non-existing-key".getBytes()));
            } catch (Exception ex) {
                // Chain the cause so the real connection error is not lost.
                throw new RuntimeException("开启幂等需要有可用的redis客户端", ex);
            }
        }
    }


    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        KafkaDealMsgTemplate.applicationContext = applicationContext;
    }



    /**
     * One poll-and-dispatch loop bound to a single processor/topic. Maintains an
     * in-memory bounded set of already-consumed message keys, mirrored to a redis
     * set (via {@code syncQueue}) so it survives restarts.
     */
    class ConsumerBiz implements Runnable{
        private FixedSizeSet idempotenceKeys;
        private MessageDealProcessor dealProcessor;
        public ConsumerBiz(MessageDealProcessor dealProcessor){
            this.dealProcessor=dealProcessor;
            CusKafkaProperties kafkaProperties = dealProcessor.getKafkaProperties();
            String topicName = dealProcessor.getKafkaProperties().getTopicName();
            idempotenceKeys = new FixedSizeSet(kafkaProperties.getIdempotenceSize());
            // Warm the in-memory set from redis; members() may return null when
            // the set does not exist yet, so guard against NPE.
            Set<String> members = redisTemplate.opsForSet().members(topicName);
            if (members != null) {
                for (String msgKey : members) {
                    idempotenceKeys.add(msgKey);
                }
            }
            // Background syncer: replays queued add/remove operations onto the
            // redis set so the idempotence state survives a restart.
            new Thread(() -> {
                while (true) {
                    try {
                        SyncOperatorType syncOperatorType = syncQueue.take();
                        if(syncOperatorType.getType().equals(0)){
                            redisTemplate.opsForSet().add(topicName,syncOperatorType.getItem());
                        }else {
                            redisTemplate.opsForSet().remove(topicName,syncOperatorType.getItem());
                        }
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }).start();
        }

        /**
         * Poll loop: preDeal -> per-record dispatch (skipping already-seen keys)
         * -> await batch completion -> postDeal -> commit offsets.
         */
        @Override
        public void run() {
            CusKafkaProperties kafkaProperties = dealProcessor.getKafkaProperties();
            KafkaConsumer<String, byte[]> kafkaConsumer = kafkaConsumerService.createConsumer(kafkaProperties);
            try {
                kafkaConsumer.subscribe(Collections.singletonList(kafkaProperties.getTopicName()));
                while (isRunning.get()){
                    ConsumerRecords<String, byte[]> consumerRecords = kafkaConsumer.poll(Duration.ofMillis(kafkaProperties.getDurationMs()));
                    if(!consumerRecords.isEmpty()){
                        dealProcessor.preDeal(consumerRecords);
                        CountDownLatch countDownLatch = new CountDownLatch(consumerRecords.count());
                        for (ConsumerRecord<String,byte[]> record:consumerRecords){
                            if(idempotenceKeys.contains(record.key())){
                                log.warn("消息已消费过：{}",record.key());
                                // Fix: skipped records must still count down, otherwise
                                // the await() below deadlocks on any duplicate message.
                                countDownLatch.countDown();
                                continue;
                            }
                            try {
                                dealProcessor.theadPoolDeal(record);
                            }catch (Exception ex){
                                // Pass the throwable itself so the stack trace is logged
                                // (the old call had no placeholder and lost the trace).
                                log.error("deal processor theadPool deal error:", ex);
                                dealProcessor.theadPoolDealException(record);
                            }finally {
                                idempotenceKeys.add(record.key());
                                syncQueue.add(SyncOperatorType.builder().item(record.key()).type(0).build());
                                countDownLatch.countDown();
                            }
                        }
                        try {
                            countDownLatch.await();
                        } catch (InterruptedException e) {
                            log.warn("consumer worker of countDownLatch await ex:{}",e.getMessage());
                            Thread.currentThread().interrupt();
                        }
                        dealProcessor.postDeal(consumerRecords);
                        try {
                            kafkaConsumer.commitAsync();
                        }catch (Exception ex){
                            log.warn("consumer commit async ex:{}",ex.getMessage());
                            kafkaConsumer.commitSync();
                        }

                    }
                }
            }catch (WakeupException ex){
                // Standard shutdown pattern: a wakeup during shutdown is expected
                // and swallowed; rethrow only if it arrives while still running.
                // (The original condition was inverted.)
                if(isRunning.get()){
                    throw ex;
                }
            }catch (Exception ex){
                // Keep the stack trace; ex.getMessage() alone hid the failure site.
                log.error("kafka consumer have a exception:", ex);
            }finally {
                // NOTE(review): this stops ALL consumers when one loop dies — looks
                // intentional (shared flag), but confirm that is the desired policy.
                isRunning.set(false);
                if(kafkaConsumer!=null){
                    kafkaConsumer.close();
                }
            }
        }

    }
}
