package com.jiesone.logger.distributecleaning.listener;
/*
 * @ClassName LoggerDistributeKafkaMQConsumer
 * @Description Consumes raw log messages from Kafka topics for centralized ingestion and cleaning.
 * @Author tangsixiang@163.com
 * @Date 2022/1/18 17:53
 * @Version 1.0
 */


import com.alibaba.fastjson.JSON;
import com.jiesone.logger.distributecleaning.handler.DataProcessingHandler;
import com.jiesone.logger.distributecleaning.util.JsonStringUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;



@Component
public class LoggerDistributeKafkaMQConsumer {

    /** SLF4J convention: loggers are private static final. */
    private static final Logger logger = LoggerFactory.getLogger(LoggerDistributeKafkaMQConsumer.class);

    @Autowired
    DataProcessingHandler processingHandler;

    /**
     * Consumes one record from the configured Kafka topics (comma-separated list in
     * {@code logger-filebeat.kafkaTopicName}) and routes its payload to
     * {@link DataProcessingHandler#pullApartStringMessage(String, String)}.
     *
     * <p>Records with a null value or a null/empty key are ignored. If the value is
     * already a JSON string (e.g. wrapped by filebeat), it is forwarded as-is;
     * otherwise it is wrapped in a {@code {message, @timestamp}} envelope so the
     * downstream handler always receives JSON.
     *
     * @param record the consumed Kafka record; key selects the processing route,
     *               value carries the log text
     */
    @KafkaListener(topics = "#{'${logger-filebeat.kafkaTopicName}'.split(',')}", groupId = "businessProcess")
    public void onMessage(ConsumerRecord<String, String> record) {
        String value = record.value();
        if (value == null) {
            // Tombstone / empty record — nothing to process.
            return;
        }

        String key = record.key();
        // Parameterized logging: no string concatenation cost when DEBUG is disabled.
        logger.debug("====topic=={}======key=={}========offset ={}", record.topic(), key, record.offset());

        // Only records carrying a non-empty key enter the processing pipeline.
        if (key == null || key.isEmpty()) {
            return;
        }

        if (JsonStringUtil.isCheckJSON(value) || JsonStringUtil.getJSONType(value)) {
            // Payload was already wrapped by filebeat, i.e. it is a JSON string.
            logger.debug("符合规范获取消息文本内容：{}", value);
            processingHandler.pullApartStringMessage(key, value);
        } else {
            // Raw log pushed straight into the queue by a third-party platform:
            // wrap it in the same envelope shape filebeat would have produced.
            Map<String, Object> envelope = new HashMap<>();
            envelope.put("message", value);
            envelope.put("@timestamp", record.timestamp());
            processingHandler.pullApartStringMessage(key, JSON.toJSONString(envelope));
            logger.debug("未fileBeat正常进入数据业务处理数据：{}", value);
        }
    }
}
