package com.insight.common.mq;

import com.alibaba.fastjson.JSONObject;
import com.insight.common.constant.CacheConstant;
import com.insight.common.mq.stream.RedisMqStream;
import com.insight.common.mq.stream.Streams;
import com.yuanqiao.insight.common.util.common.RedisStreamUtil;
import com.yuanqiao.insight.common.util.common.RedisUtils;
import io.lettuce.core.api.async.RedisAsyncCommands;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.connection.stream.Consumer;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.ReadOffset;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.stream.StreamListener;
import org.springframework.data.redis.stream.StreamMessageListenerContainer;
import org.springframework.data.redis.stream.Subscription;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @author JinPengDong
 * @date 2023/10/10
 */
@Slf4j
@Component
@RequiredArgsConstructor
public class DefaultRedisMq implements RedisMq {

    private final StreamMessageListenerContainer<String, MapRecord<String, String, String>> streamMessageListenerContainer;
    private final RedisStreamUtil redisStreamUtil;
    private final RedisUtils redisUtils;
    private final RedisAsyncCommands<String, String> asyncCommands;

    /** Max number of pending acknowledgements buffered per flusher before a flush is forced. */
    @Value("${spring.redis.stream.ack.batch-size}")
    private Integer ackBatchSize;
    /** Interval between periodic ack flushes (presumably milliseconds — TODO confirm against AckFlusher). */
    @Value("${spring.redis.stream.ack.interval}")
    private Integer ackInterval;

    /** Page size used when scanning the pending-entries list. */
    @Value("${spring.redis.stream.idle.page-size}")
    private Integer pageSize;
    /** Degree of parallelism for the pending-entry cleaner. */
    @Value("${spring.redis.stream.idle.parallelism}")
    private Integer parallelism;
    /** Idle threshold after which a pending message is considered abandoned (presumably ms — TODO confirm). */
    @Value("${spring.redis.stream.idle.time}")
    private Long idleTimeout;
    /** Interval between cleaner runs (presumably ms — TODO confirm). */
    @Value("${spring.redis.stream.idle.interval}")
    private Long idleInterval;

    /**
     * AckFlusher registry keyed by "{group}^_^{stream}".
     * Populated in {@link #subscribe} and exposed publicly for lookup elsewhere;
     * ConcurrentHashMap because registration and lookups may happen on different
     * threads (container listener threads vs. the subscribing thread).
     */
    public final static Map<String, AckFlusher> consumerMap = new ConcurrentHashMap<>();

    /**
     * Publishes a message onto the given stream, but only if the stream has
     * already been initialized/subscribed (tracked by {@code RedisMqInit.isSubscribe}).
     * Messages published to an uninitialized stream are silently dropped.
     *
     * @param streamKey the Redis stream key (channel) to publish to
     * @param content   the message payload; serialized and stored under the "content" field
     */
    @Override
    public void publish(String streamKey, JSONObject content) {
        // Publishing is only allowed once the stream has been initialized by a subscriber.
        Optional<Boolean> optional = Optional.ofNullable(RedisMqInit.isSubscribe.get(streamKey));
        if (optional.orElse(false)) {
            // Cap the stream length to avoid unbounded memory growth.
            // NOTE(review): the original comment claimed a 10000-entry cap but the code
            // trims to 1000 — confirm which value is intended.
            redisStreamUtil.trim(streamKey, 1000);
            JSONObject data = new JSONObject();
            data.put("content", content.toJSONString());
            redisStreamUtil.addObj(streamKey, data);
            log.info("消息已发布, 订阅频道: {}", streamKey);
        }
    }

    /**
     * Registers a consumer-group subscription on the stream described by {@code streams},
     * wires up a batching {@link AckFlusher} for it, and starts a {@code PendingAckCleaner}
     * that reclaims idle pending entries. All failures are logged, never thrown.
     *
     * @param listener callback invoked for each received stream record
     * @param streams  descriptor carrying the stream name and consumer-group name
     */
    @Override
    public void subscribe(StreamListener<String, MapRecord<String, String, String>> listener, Streams streams) {
        try {
            final RedisMqStream value = streams.getValue();
            if (value == null){
                log.error("订阅参数为 null");
                return;
            }
            String stream = value.getName();
            String group = value.getGroup();

            if (stream == null || group == null) {
                log.error("订阅参数为 null");
                return;
            }

            if (!redisUtils.hasKey(CacheConstant.SYS_STREAM_HEARTBEAT)) {
                // First subscriber coming online: purge all stale messages from the stream.
                redisStreamUtil.trim(stream, 0);
            }

            // Register the batching ack flusher for this (group, stream) pair.
            // NOTE(review): entries are never removed — confirm flushers are cleaned up on shutdown.
            AckFlusher ackFlusher = new AckFlusher(asyncCommands, stream, group, ackBatchSize, ackInterval);
            consumerMap.put(String.format("%s^_^%s", group, stream), ackFlusher);

            final Subscription subscription = streamMessageListenerContainer.receive(
                    Consumer.from(group, RedisMqInit.getCONSUMER()),
                    StreamOffset.create(stream, ReadOffset.lastConsumed()),
                    listener
            );

            log.info("订阅消息, 消费者: {}, 订阅频道: {}, isActive: {}",
                    RedisMqInit.getCONSUMER(), stream, subscription != null && subscription.isActive());

            // Start the pending-entry cleaner; the constructor apparently self-starts
            // (instance is intentionally discarded). Guarded so a cleaner failure
            // does not break the subscription itself.
            try {
                new PendingAckCleaner(asyncCommands, stream, group, RedisMqInit.getCONSUMER(),
                        idleTimeout, idleInterval, pageSize, parallelism);
            } catch (Exception e) {
                log.error("启动 PendingAckCleaner 失败", e);
            }

        } catch (Exception e) {
            log.error("订阅失败", e);
        }
    }
}
