package com.example.demo.stream.processor;

import com.example.demo.stream.annotation.RedisStreamListener;
import com.example.demo.stream.common.DataObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.Range;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.stream.*;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.stream.*;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

@Configuration
@Slf4j
public class RedisStreamListenerRegistrar implements ApplicationContextAware, InitializingBean, DisposableBean {

    private ApplicationContext applicationContext;
    private final RedisConnectionFactory redisConnectionFactory;
    private final StringRedisTemplate stringRedisTemplate;
    // Jackson ObjectMapper is thread-safe once configured; one shared instance is enough.
    private final ObjectMapper mapper = new ObjectMapper();

    // Resources created per listener, tracked so destroy() can release them on shutdown.
    private final List<StreamMessageListenerContainer<String, MapRecord<String, String, String>>> containers = new ArrayList<>();
    private final List<ThreadPoolTaskExecutor> executors = new ArrayList<>();
    private final List<ScheduledExecutorService> schedulers = new ArrayList<>();

    public RedisStreamListenerRegistrar(RedisConnectionFactory redisConnectionFactory,
                                        StringRedisTemplate stringRedisTemplate) {
        this.redisConnectionFactory = redisConnectionFactory;
        this.stringRedisTemplate = stringRedisTemplate;
    }

    @Override
    public void setApplicationContext(ApplicationContext ctx) {
        this.applicationContext = ctx;
    }

    /**
     * Scans the context for beans annotated with {@link RedisStreamListener} and wires a
     * stream listener container plus a pending-retry scanner for each one.
     */
    @Override
    public void afterPropertiesSet() {
        Map<String, Object> beans = applicationContext.getBeansWithAnnotation(RedisStreamListener.class);
        beans.forEach((beanName, bean) -> {
            // Resolve the ultimate target class so the annotation is found through AOP proxies.
            Class<?> targetClass = AopProxyUtils.ultimateTargetClass(bean);
            RedisStreamListener ann = targetClass.getAnnotation(RedisStreamListener.class);
            if (ann != null && bean instanceof RedisStreamMessageHandler) {
                registerStreamListener(ann, (RedisStreamMessageHandler) bean);
            }
        });
    }

    /** Stops all containers and shuts down the per-listener executors and schedulers. */
    @Override
    public void destroy() {
        containers.forEach(StreamMessageListenerContainer::stop);
        schedulers.forEach(ScheduledExecutorService::shutdownNow);
        executors.forEach(ThreadPoolTaskExecutor::shutdown);
    }

    /**
     * Wires one annotated handler: a dedicated thread pool, the consumer group, the
     * listener container, and a scheduled task that retries messages stuck in the PEL.
     */
    private void registerStreamListener(RedisStreamListener ann, RedisStreamMessageHandler handler) {
        // 1. Dedicated thread pool per listener so a slow topic cannot starve the others.
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(ann.consumeThreadMax());
        executor.setMaxPoolSize(ann.consumeThreadMax());
        executor.setQueueCapacity(100);
        executor.setThreadNamePrefix("redis-stream-" + ann.topic() + "-");
        executor.initialize();
        executors.add(executor);

        // 2. Make sure the stream and the consumer group exist.
        ensureGroup(ann);

        // 3. Container configuration.
        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, MapRecord<String, String, String>> options =
                StreamMessageListenerContainer.StreamMessageListenerContainerOptions.builder()
                        .executor(executor)
                        .pollTimeout(Duration.ofSeconds(1))
                        .build();

        StreamMessageListenerContainer<String, MapRecord<String, String, String>> container =
                StreamMessageListenerContainer.create(redisConnectionFactory, options);

        // 4. Register the listener. With a consumer group, XREADGROUP must read at ">"
        //    (ReadOffset.lastConsumed()) to receive NEW messages; an explicit id such as
        //    "0-0" only replays this consumer's own pending entries and would never see
        //    fresh traffic. A brand-new group still consumes from the head of the stream
        //    because ensureGroup() creates it at offset 0.
        String consumerName = ann.topic() + "-" + UUID.randomUUID();
        container.receive(
                Consumer.from(ann.consumerGroup(), consumerName),
                StreamOffset.create(ann.topic(), ReadOffset.lastConsumed()),
                (MapRecord<String, String, String> record) -> {
                    try {
                        // record.getValue() is the Map<String, String> that was XADDed.
                        DataObject<?> obj = mapper.convertValue(record.getValue(), DataObject.class);
                        obj.setMessageId(record.getId().getValue());
                        handler.onMessage(obj);

                        // Manual ack only after the handler succeeded.
                        stringRedisTemplate.opsForStream()
                                .acknowledge(ann.topic(), ann.consumerGroup(), record.getId());
                    } catch (Exception ex) {
                        // No ack: the message stays in the PEL and the scanner below retries it.
                        // Pass ex as the last argument so SLF4J logs the stack trace.
                        log.error("消费失败，留在pending，等待重试，stream={},group={},id={}",
                                ann.topic(), ann.consumerGroup(), record.getId(), ex);
                    }
                }
        );

        // 5. Start the container and keep it for lifecycle shutdown.
        container.start();
        containers.add(container);

        // 6. Periodically scan the pending list and retry stuck messages.
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
            Thread t = new Thread(r, "redis-stream-retry-" + ann.topic());
            t.setDaemon(true);
            return t;
        });
        schedulers.add(scheduler);
        scheduler.scheduleAtFixedRate(() -> scanPending(ann, handler), 0, ann.retryInterval(), TimeUnit.SECONDS);
    }

    /** Creates the stream (if absent) and the consumer group, starting the group at offset 0. */
    private void ensureGroup(RedisStreamListener ann) {
        try {
            if (!Boolean.TRUE.equals(stringRedisTemplate.hasKey(ann.topic()))) {
                // XGROUP CREATE fails on a missing stream (MKSTREAM is not used here), so seed it.
                stringRedisTemplate.opsForStream()
                        .add(MapRecord.create(ann.topic(), Collections.singletonMap("init", "0")));
            }
            // Offset 0 => a freshly created group consumes the stream from the beginning.
            stringRedisTemplate.opsForStream().createGroup(ann.topic(), ReadOffset.from("0"), ann.consumerGroup());
            log.info("新建 group：{}, stream：{}", ann.consumerGroup(), ann.topic());
        } catch (Exception e) {
            if (isBusyGroup(e)) {
                // Consumer group already exists — nothing to do.
                log.info("已存在 group：{}, stream：{}", ann.consumerGroup(), ann.topic());
            } else {
                throw e;
            }
        }
    }

    /**
     * True if the exception (or any of its causes) carries Redis' BUSYGROUP reply,
     * i.e. the consumer group already exists. Walks the whole cause chain null-safely;
     * the original code dereferenced getCause() without a null check and could NPE.
     */
    private static boolean isBusyGroup(Throwable t) {
        for (Throwable cur = t; cur != null; cur = cur.getCause()) {
            String msg = cur.getMessage();
            if (msg != null && msg.contains("BUSYGROUP")) {
                return true;
            }
        }
        return false;
    }

    /** One pass over the group's pending list: ack exhausted messages, claim and retry the rest. */
    private void scanPending(RedisStreamListener ann, RedisStreamMessageHandler handler) {
        try {
            PendingMessages pendingMessages = stringRedisTemplate.opsForStream()
                    .pending(ann.topic(), ann.consumerGroup(), Range.unbounded(), 100);

            for (PendingMessage msg : pendingMessages) {
                if (msg.getTotalDeliveryCount() > ann.maxRetryCount()) {
                    log.warn("消息超过最大重试次数: {}, stream={}, id={}",
                            ann.maxRetryCount(), ann.topic(), msg.getIdAsString());
                    // Ack so the exhausted message stops being rescanned forever.
                    stringRedisTemplate.opsForStream()
                            .acknowledge(ann.topic(), ann.consumerGroup(), msg.getId());
                    continue;
                }
                claimMessage(ann, msg);
                retryMessage(ann, handler, msg);
            }
        } catch (Exception e) {
            log.error("Pending list 扫描出错: stream={}, group={}", ann.topic(), ann.consumerGroup(), e);
        }
    }

    /**
     * XCLAIMs the pending message over to {@code ann.consumer()} via the low-level
     * connection API. NOTE(review): the 1s min-idle-time means a message still being
     * processed by the live listener may be claimed and handled twice — consider
     * raising it above the handler's worst-case latency.
     */
    private void claimMessage(RedisStreamListener ann, PendingMessage msg) {
        stringRedisTemplate.execute((RedisConnection connection) -> {
            byte[] key = stringRedisTemplate.getStringSerializer().serialize(ann.topic());
            connection.streamCommands().xClaim(
                    key,
                    ann.consumerGroup(),
                    ann.consumer(),
                    Duration.ofSeconds(1),
                    RecordId.of(msg.getIdAsString())
            );
            return null;
        });
    }

    /** Re-reads the claimed message body, re-invokes the handler, and acks on success. */
    private void retryMessage(RedisStreamListener ann, RedisStreamMessageHandler handler, PendingMessage msg) {
        List<MapRecord<String, Object, Object>> records = stringRedisTemplate.opsForStream()
                .range(ann.topic(), Range.closed(msg.getIdAsString(), msg.getIdAsString()));
        if (records == null) {
            return;
        }
        for (MapRecord<String, Object, Object> r : records) {
            try {
                DataObject<?> obj = mapper.convertValue(r.getValue(), DataObject.class);
                obj.setMessageId(r.getId().getValue());
                handler.onMessage(obj);
                stringRedisTemplate.opsForStream()
                        .acknowledge(ann.topic(), ann.consumerGroup(), r.getId());
                log.info("Pending 消息重试成功: stream={}, id={}", ann.topic(), r.getId());
            } catch (Exception ex) {
                log.error("Pending 消息重试失败: stream={}, id={}", ann.topic(), r.getId(), ex);
            }
        }
    }

    /**
     * Copies a message into a "dead-letter-&lt;stream&gt;" stream and deletes it from the
     * source. No-op when the message id no longer exists — the original called get(0)
     * unconditionally and would throw on an empty range result.
     */
    private void moveToDeadLetterQueue(String stream, String messageId) {
        List<MapRecord<String, Object, Object>> records = stringRedisTemplate.opsForStream()
                .range(stream, Range.closed(messageId, messageId));
        if (records == null || records.isEmpty()) {
            return;
        }
        MapRecord<String, Object, Object> record = records.get(0);
        stringRedisTemplate.opsForStream().add("dead-letter-" + stream, record.getValue());
        stringRedisTemplate.opsForStream().delete(stream, messageId);
    }
}
