package com.greate.community.event;

import com.alibaba.fastjson.JSONObject;
import com.greate.community.config.KafkaConsumerConfig;
import com.greate.community.entity.DiscussPost;
import com.greate.community.entity.Event;
import com.greate.community.entity.Message;
import com.greate.community.service.DiscussPostService;
import com.greate.community.service.ElasticsearchService;
import com.greate.community.service.MessageService;
import com.greate.community.util.CommunityConstant;
import com.greate.community.util.OffsetManager;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.time.Duration;
import java.util.*;

@ConditionalOnProperty(name = "spring.kafka.consumer.enable-auto-commit", havingValue = "false")
@Component
public class consumerManual implements CommunityConstant {

    // FIX: was LoggerFactory.getLogger(EventConsumer.class), which attributed every
    // log line from this class to EventConsumer.
    private static final Logger logger = LoggerFactory.getLogger(consumerManual.class);

    @Autowired
    private MessageService messageService;

    @Autowired
    private DiscussPostService discussPostService;

    @Autowired
    private ElasticsearchService elasticsearchService;

    @Autowired
    private KafkaConsumerConfig kafkaConsumerConfig;

    @Autowired
    private OffsetManager offsetManager;

    private KafkaConsumer<String, String> commentConsumer; // 评论事件消费者
    private KafkaConsumer<String, String> publishConsumer; // 发帖事件消费者
    private KafkaConsumer<String, String> deleteConsumer;  // 删帖事件消费者

    /**
     * Creates one subscribed KafkaConsumer per topic after dependency injection.
     * Each consumer belongs to its own consumer group and is driven by its own
     * @Scheduled method below (single-threaded scheduler ⇒ no concurrent access
     * to a non-thread-safe KafkaConsumer).
     */
    @PostConstruct
    public void init() {
        commentConsumer = createConsumer(TOPIC_COMMNET, COMMENT_CONSUMER_GROUP);
        logger.info("评论事件消费者初始化完成，订阅主题: {}", TOPIC_COMMNET);

        publishConsumer = createConsumer(TOPIC_PUBLISH, PUBLISH_CONSUMER_GROUP);
        logger.info("发帖事件消费者初始化完成，订阅主题: {}", TOPIC_PUBLISH);

        deleteConsumer = createConsumer(TOPIC_DELETE, DELETE_CONSUMER_GROUP);
        logger.info("删帖事件消费者初始化完成，订阅主题: {}", TOPIC_DELETE);
    }

    /**
     * FIX (resource leak): KafkaConsumer holds TCP connections and broker-side
     * group membership; it must be closed explicitly on shutdown, otherwise the
     * broker keeps the member alive until session timeout.
     */
    @PreDestroy
    public void destroy() {
        closeQuietly(commentConsumer);
        closeQuietly(publishConsumer);
        closeQuietly(deleteConsumer);
    }

    // Best-effort close during shutdown; a failure here must not abort context teardown.
    private void closeQuietly(KafkaConsumer<String, String> consumer) {
        if (consumer != null) {
            try {
                consumer.close();
            } catch (Exception e) {
                logger.warn("关闭 KafkaConsumer 失败", e);
            }
        }
    }

    /**
     * Builds a KafkaConsumer for the given group and subscribes it to one topic.
     */
    private KafkaConsumer<String, String> createConsumer(String topic, String groupId) {
        ConsumerFactory<String, String> consumerFactory = kafkaConsumerConfig.createConsumerFactory(groupId);
        KafkaConsumer<String, String> consumer = (KafkaConsumer<String, String>) consumerFactory.createConsumer();
        consumer.subscribe(Collections.singletonList(topic));
        return consumer;
    }

    // 评论事件消费者，每隔 5 秒执行一次
    @Scheduled(fixedRate = 5000)
    public void consumeCommentMessages() {
        logger.info("开始从 Kafka 拉取评论事件消息...");
        consumeMessages(commentConsumer, TOPIC_COMMNET, COMMENT_CONSUMER_GROUP);
    }

    // 发帖事件消费者，每隔 10 秒执行一次
    @Scheduled(fixedRate = 10000)
    public void consumePublishMessages() {
        logger.info("开始从 Kafka 拉取发帖事件消息...");
        consumeMessages(publishConsumer, TOPIC_PUBLISH, PUBLISH_CONSUMER_GROUP);
    }

    // 删帖事件消费者，每隔 30 秒执行一次
    @Scheduled(fixedRate = 30000)
    public void consumeDeleteMessages() {
        logger.info("开始从 Kafka 拉取删帖事件消息...");
        consumeMessages(deleteConsumer, TOPIC_DELETE, DELETE_CONSUMER_GROUP);
    }

    /**
     * Polls one batch: restore the committed position from Redis (seek), poll
     * once, and dispatch each record. Offsets are only written back to Redis
     * after a record is fully processed, so a crash mid-batch replays the
     * unacknowledged records (at-least-once delivery).
     *
     * @param consumer the consumer bound to {@code topic}
     * @param topic    主题
     * @param groupId  消费者组
     */
    private void consumeMessages(KafkaConsumer<String, String> consumer, String topic, String groupId) {
        // 从 Redis 中读取偏移量
        Map<TopicPartition, Long> offsets = offsetManager.readOffset(topic, groupId);

        // 设置消费者偏移量
        for (Map.Entry<TopicPartition, Long> entry : offsets.entrySet()) {
            consumer.seek(entry.getKey(), entry.getValue());
            logger.info("设置分区 {} 的偏移量为 {}", entry.getKey(), entry.getValue());
        }

        // 拉取消息，设置超时时间为 1 秒
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));

        // 处理消息
        for (ConsumerRecord<String, String> record : records) {
            handleMessage(record);
        }
        logger.info("本次拉取 {} 主题的消息数量: {}", topic, records.count());
    }

    /**
     * Validates and parses a record exactly once, then dispatches on topic.
     * FIX: previously every handler re-parsed the record a second time.
     */
    private void handleMessage(ConsumerRecord<String, String> record) {
        if (record == null || record.value() == null) {
            logger.error("消息的内容为空");
            return;
        }

        Event event = JSONObject.parseObject(record.value(), Event.class);
        if (event == null) {
            logger.error("消息格式错误");
            return;
        }

        switch (record.topic()) {
            case TOPIC_COMMNET:
                handleCommentMessage(record, event);
                break;
            case TOPIC_PUBLISH:
                handlePublishMessage(record, event);
                break;
            case TOPIC_DELETE:
                handleDeleteMessage(record, event);
                break;
            default:
                logger.error("未知主题: {}", record.topic());
        }
    }

    /**
     * Comment event: persist a system notification for the entity's owner,
     * then acknowledge the record's offset in Redis.
     */
    private void handleCommentMessage(ConsumerRecord<String, String> record, Event event) {
        // 发送系统通知
        Message message = new Message();
        message.setFromId(SYSTEM_USER_ID);
        message.setToId(event.getEntityUserId());
        message.setConversationId(event.getTopic());
        message.setCreateTime(new Date());

        Map<String, Object> content = new HashMap<>();
        content.put("userId", event.getUserId());
        content.put("entityType", event.getEntityType());
        content.put("entityId", event.getEntityId());
        // FIX: guard against a null data map (previously NPE on getData().isEmpty())
        Map<String, Object> data = event.getData();
        if (data != null && !data.isEmpty()) { // 存储 Event 中的 Data
            content.putAll(data);
        }
        message.setContent(JSONObject.toJSONString(content));

        // 保存消息到数据库
        messageService.addMessage(message);
        // 提交偏移量到 Redis
        commitOffset(record, TOPIC_COMMNET, COMMENT_CONSUMER_GROUP, "消息处理完成，偏移量已提交: {}");
    }

    /**
     * Publish event: (re)index the post in Elasticsearch, then acknowledge.
     */
    private void handlePublishMessage(ConsumerRecord<String, String> record, Event event) {
        DiscussPost post = discussPostService.findDiscussPostById(event.getEntityId());
        if (post != null) {
            elasticsearchService.saveDiscusspost(post);
        } else {
            // Post already removed — skip indexing but still commit the offset,
            // otherwise this record would be replayed forever.
            logger.warn("发帖事件对应的帖子不存在, entityId: {}", event.getEntityId());
        }

        // 提交偏移量到 Redis
        commitOffset(record, TOPIC_PUBLISH, PUBLISH_CONSUMER_GROUP, "发帖事件处理完成，偏移量已提交: {}");
    }

    /**
     * Delete event: remove the post from the Elasticsearch index, then acknowledge.
     */
    private void handleDeleteMessage(ConsumerRecord<String, String> record, Event event) {
        elasticsearchService.deleteDiscusspost(event.getEntityId());
        // 提交偏移量到 Redis
        commitOffset(record, TOPIC_DELETE, DELETE_CONSUMER_GROUP, "删帖事件处理完成，偏移量已提交: {}");
    }

    /**
     * Stores record.offset() + 1 (the NEXT position to read) in Redis for the
     * given topic/group. Extracted from three identical copy-pasted blocks.
     */
    private void commitOffset(ConsumerRecord<String, String> record, String topic, String groupId, String logPattern) {
        Map<TopicPartition, Long> offsets = new HashMap<>();
        offsets.put(new TopicPartition(record.topic(), record.partition()), record.offset() + 1);
        offsetManager.saveOffset(topic, groupId, offsets);
        logger.info(logPattern, offsets);
    }
}

