package com.fzf.org.config;

import com.fzf.org.mq.consumer.BlogReviewerConsumer;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.stream.Consumer;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.ReadOffset;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.stream.StreamMessageListenerContainer;
import org.springframework.data.redis.stream.Subscription;

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import static com.fzf.org.constants.RedisKeyConstant.BLOG_REVIEW_STREAM_GROUP_KEY;
import static com.fzf.org.constants.RedisKeyConstant.BLOG_REVIEW_STREAM_TOPIC_KEY;

/**
 * Redis Stream message-queue configuration.
 *
 * <p>Wires up, in dependency order:
 * <ol>
 *   <li>a dedicated single-thread executor that runs the stream poll loop;</li>
 *   <li>a {@link StreamMessageListenerContainer} bound to that executor;</li>
 *   <li>a bootstrap bean that makes sure the stream key and consumer group exist;</li>
 *   <li>the blog-review {@link Subscription} that delivers records to
 *       {@link BlogReviewerConsumer}.</li>
 * </ol>
 */
@Slf4j
@Configuration
@RequiredArgsConstructor
public class RedisStreamConfiguration {

    private final StringRedisTemplate redisTemplate;
    private final RedisConnectionFactory redisConnectionFactory;
    private final BlogReviewerConsumer blogReviewerConsumer;

    /**
     * Single-thread executor dedicated to the blog-review stream poll task.
     *
     * <p>Threads are daemons so an idle poll loop never prevents JVM shutdown.
     * Only one long-lived poll task is ever submitted, so the queue and
     * rejection policy are effectively unused safety defaults.
     *
     * @return the executor handed to the listener-container options
     */
    @Bean
    public ExecutorService asyncStreamConsumer() {
        AtomicInteger index = new AtomicInteger();
        return new ThreadPoolExecutor(1,
                1,
                60,
                TimeUnit.SECONDS,
                new SynchronousQueue<>(),
                runnable -> {
                    Thread thread = new Thread(runnable);
                    thread.setName("stream_consumer_blog_review_" + index.incrementAndGet());
                    thread.setDaemon(true);
                    return thread;
                },
                new ThreadPoolExecutor.DiscardOldestPolicy()
        );
    }

    /**
     * Listener container that polls the stream and dispatches batches of records.
     *
     * <p>Spring calls {@code stop()} on context shutdown via {@code destroyMethod}.
     *
     * @param asyncStreamConsumer executor that runs the poll loop
     * @return a started container; subscriptions registered later activate immediately
     */
    @Bean(destroyMethod = "stop")
    public StreamMessageListenerContainer<String, MapRecord<String, String, String>> streamMessageListenerContainer(ExecutorService asyncStreamConsumer) {
        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, MapRecord<String, String, String>> options =
                StreamMessageListenerContainer.StreamMessageListenerContainerOptions
                        .builder()
                        // Maximum number of records fetched per poll.
                        .batchSize(10)
                        // Executor that runs the poll/dispatch loop.
                        .executor(asyncStreamConsumer)
                        // Block time when no message is available. Must stay below
                        // ${spring.data.redis.timeout} or the poll times out.
                        .pollTimeout(Duration.ofSeconds(3))
                        .build();

        StreamMessageListenerContainer<String, MapRecord<String, String, String>> container =
            StreamMessageListenerContainer.create(redisConnectionFactory, options);
        container.start();
        return container;
    }

    /**
     * Ensures the stream key and its consumer group exist before any consumer
     * subscribes, so XREADGROUP never fails with NOGROUP.
     *
     * <p>Uses {@code opsForStream()} instead of raw
     * {@code redisConnectionFactory.getConnection()}: the template borrows and
     * releases the connection itself (the previous code leaked connections and
     * used charset-dependent {@code String#getBytes()}).
     *
     * @return {@code true} when stream and group are ready, {@code false} on failure
     */
    @Bean
    public boolean initializeStreamAndGroup() {
        try {
            if (Boolean.FALSE.equals(redisTemplate.hasKey(BLOG_REVIEW_STREAM_TOPIC_KEY))) {
                // Stream does not exist yet: create it by adding a marker record,
                // then create the consumer group starting from the beginning.
                Map<String, String> initMessage = new HashMap<>();
                initMessage.put("type", "init");
                initMessage.put("timestamp", String.valueOf(System.currentTimeMillis()));
                redisTemplate.opsForStream().add(BLOG_REVIEW_STREAM_TOPIC_KEY, initMessage);

                redisTemplate.opsForStream().createGroup(
                        BLOG_REVIEW_STREAM_TOPIC_KEY,
                        ReadOffset.from("0"),
                        BLOG_REVIEW_STREAM_GROUP_KEY);

                log.info("Initialized stream and consumer group: {}, {}",
                        BLOG_REVIEW_STREAM_TOPIC_KEY, BLOG_REVIEW_STREAM_GROUP_KEY);
            } else {
                // Stream exists; make sure the consumer group does too.
                try {
                    redisTemplate.opsForStream().createGroup(
                            BLOG_REVIEW_STREAM_TOPIC_KEY,
                            ReadOffset.from("0"),
                            BLOG_REVIEW_STREAM_GROUP_KEY);
                    log.info("Created consumer group: {}", BLOG_REVIEW_STREAM_GROUP_KEY);
                } catch (Exception ignored) {
                    // BUSYGROUP: the group already exists — nothing to do.
                    log.info("Consumer group already exists: {}", BLOG_REVIEW_STREAM_GROUP_KEY);
                }
            }
        } catch (Exception e) {
            log.error("Failed to initialize stream and consumer group: {}, {}",
                    BLOG_REVIEW_STREAM_TOPIC_KEY, BLOG_REVIEW_STREAM_GROUP_KEY, e);
            return false;
        }
        return true;
    }


    /**
     * Registers the blog-review consumer against the stream.
     *
     * <p>{@code @DependsOn} guarantees the stream/group bootstrap has run first,
     * otherwise the group-based read would fail with NOGROUP.
     *
     * @param container the (already started) listener container
     * @return the active subscription
     */
    @Bean
    @DependsOn("initializeStreamAndGroup")
    public Subscription blogReviewConsumerSubscription(
            StreamMessageListenerContainer<String, MapRecord<String, String, String>> container) {
        // lastConsumed() maps to XREADGROUP ">": deliver records not yet handed to
        // any consumer in the group. An explicit offset such as from("0") would only
        // replay THIS consumer's pending-entries list and — with autoAcknowledge —
        // would soon receive nothing at all, silently dropping new messages.
        // Backlog from before the group existed is covered by creating the group at
        // offset "0" in initializeStreamAndGroup().
        StreamMessageListenerContainer.StreamReadRequest<String> streamReadRequest =
                StreamMessageListenerContainer.StreamReadRequest.builder(
                                StreamOffset.create(BLOG_REVIEW_STREAM_TOPIC_KEY, ReadOffset.lastConsumed()))
                        // Keep polling even if a single read fails.
                        .cancelOnError(throwable -> false)
                        .consumer(Consumer.from(BLOG_REVIEW_STREAM_GROUP_KEY, "blog-review-consumer"))
                        // XACK is sent automatically after the listener returns.
                        .autoAcknowledge(true)
                        .build();

        return container.register(streamReadRequest, blogReviewerConsumer);
    }
}
