package cool.webstudy.async.mq.kafka.listener.log.api;

import com.fasterxml.jackson.databind.ObjectMapper;
import cool.webstudy.async.service.ApiLoggerService;
import cool.webstudy.common.constant.LoggerConstant;
import cool.webstudy.common.model.dto.logger.api.CreateApiLogESDTO;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.stereotype.Component;

import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Kafka consumer for API request logs: receives JSON payloads from the
 * API-log topic, deserializes them into {@link CreateApiLogESDTO} and hands
 * them to {@link ApiLoggerService} for persistence to Elasticsearch.
 *
 * @author 莫振双
 * @date 2024/8/27 17:42
 * @description: API request log consumer
 */
@Component
public class ApiLoggerESListener {
    // ObjectMapper is thread-safe after configuration; reuse one instance
    // instead of allocating a new mapper for every consumed message.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    final Logger businessLogger = Logger.getLogger(LoggerConstant.BUSINESS_LOGGER);

    @Autowired
    private ApiLoggerService apiLoggerService;

    /**
     * Handles a single record from the API-log topic.
     * <p>
     * Blank, null, or non-String payloads are ignored. Deserialization or
     * persistence failures are logged and swallowed so the consumer keeps
     * polling instead of crashing the listener container.
     *
     * @param record the consumed Kafka record; its value is expected to be a JSON string
     * @param topic  the topic this record was received from
     */
    @KafkaListener(topics = LoggerConstant.API_LOG_ES_TOPIC, groupId = LoggerConstant.LOGGER_GROUP_ID)
    public void listener(ConsumerRecord<?, ?> record, @Header(KafkaHeaders.RECEIVED_TOPIC) String topic) {
        Object value = record.value();
        // Guard instead of blind cast: a non-String value previously threw
        // ClassCastException outside the try block and escaped the listener.
        if (!(value instanceof String)) {
            return;
        }
        String msgStr = (String) value;
        if (StringUtils.isBlank(msgStr)) {
            return;
        }
        try {
            // Deserialize the JSON payload and forward it to the ES writer.
            CreateApiLogESDTO dto = OBJECT_MAPPER.readValue(msgStr, CreateApiLogESDTO.class);
            apiLoggerService.createApiLogToES(dto);
        } catch (Exception e) {
            // Level.OFF suppresses the record entirely — the original call logged
            // nothing. Log at SEVERE and pass the Throwable itself so handlers
            // print the full stack trace (getStackTrace() as Object[] does not).
            businessLogger.log(Level.SEVERE, "Failed to process API log message from topic " + topic, e);
        }
    }
}
