package com.dd.stack.kafka.consumer.service.inner.impl;

import com.dd.stack.kafka.common.pojo.BO.JsonUserBO;
import com.dd.stack.kafka.common.pojo.BO.LogMessage;
import com.dd.stack.kafka.common.pojo.BO.User;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.JSONPObject;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.context.annotation.Description;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;

import java.io.DataInput;
import java.io.IOException;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;

/**
 * @Author liuxianmeng
 * @CreateTime 2025/2/14 2:39
 * @Description KafkaServiceImpl
 */
@Slf4j
@Service
public class KafkaServiceImpl {
    /** Kafka topic consumed by {@link #listen(String, Acknowledgment)}. */
    private static final String TOPIC = "test_topic";

    /**
     * Consumes plain-string messages from {@code test_topic}.
     * <p>
     * Offsets are committed manually via the supplied {@link Acknowledgment}
     * (requires manual ack mode on the listener container).
     *
     * @param message        received message payload
     * @param acknowledgment handle used to commit the consumed offset
     */
    @KafkaListener(topics = TOPIC, groupId = "consumer-group")
    public void listen(String message, Acknowledgment acknowledgment) {
        // Parameterized logging: no string concatenation when the level is disabled.
        log.info("Received message: {}", message);
        // Commit the offset for this record.
        acknowledgment.acknowledge();
    }

    /**
     * Consumes JSON-deserialized {@link JsonUserBO} records from the {@code users} topic.
     * <p>
     * Logs the whole {@link ConsumerRecord} (key, partition, offset, ...) and the
     * deserialized user payload, then commits the offset manually.
     *
     * @param record         full consumer record whose value is the deserialized user
     * @param acknowledgment handle used to commit the consumed offset
     */
    @Description("用于测试Json序列化Java对象传输kafka消息")
    @KafkaListener(topics = "users", groupId = "user-group")
    public void consumeUser(ConsumerRecord<String, JsonUserBO> record, Acknowledgment acknowledgment) {
        log.info("Received message: {}", record);
        JsonUserBO user = record.value();
        log.info("Consumed user: {}", user);
        // Verifies that the JavaTime field survived JSON (de)serialization.
        log.info("Consumed user.createdAt: {}", user.getCreatedAt());
        // Commit the offset for this record.
        acknowledgment.acknowledge();
    }

    /**
     * Consumes application log messages from the {@code app-logs} topic.
     * <p>
     * Currently only logs the message; a real deployment would forward it to a
     * sink such as Elasticsearch, a monitoring system, or an alerting pipeline.
     *
     * @param logMessage     deserialized log payload
     * @param acknowledgment handle used to commit the consumed offset
     */
    @Description("Kafka日志处理")
    @KafkaListener(topics = "app-logs", groupId = "log-consumer-group")
    public void consumeLog(LogMessage logMessage, Acknowledgment acknowledgment) {
        // Use the class logger instead of System.out, consistent with the other listeners.
        log.info("Received log: {}", logMessage);

        // In a real project this might:
        // 1. persist to Elasticsearch
        // 2. forward to a monitoring system
        // 3. trigger alerting on errors

        // Commit the offset for this record.
        acknowledgment.acknowledge();
    }
}
