// NOTE(review): This entire class is commented out (dead code). Best practice is to
// delete commented-out code and rely on version control to recover it if needed.
// If it is ever re-enabled, the review notes embedded below should be addressed first.
//package cm.kafka.service;
//
//import cm.kafka.configuration.ExecutorConfig;
//import cm.kafka.entity.AuditLog;
//import cm.kafka.entity.EventLog;
//import cm.kafka.util.LocalDateTimeUtils;
//import com.alibaba.fastjson.JSONObject;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.kafka.annotation.KafkaListener;
//import org.springframework.kafka.core.KafkaTemplate;
//import org.springframework.kafka.support.Acknowledgment;
//import org.springframework.stereotype.Service;
//import org.springframework.util.CollectionUtils;
//
//import java.time.LocalDateTime;
//import java.util.ArrayList;
//import java.util.List;
//
// Demo Kafka consumer: batch-consumes "test-syslog", converts each record into an
// EventLog, wraps it as an AuditLog, and forwards the result to the "test-warn" topic.
//@Service
//@Slf4j
//public class KafkaConsumerDemo {
//    @Autowired
//    ExecutorConfig executorConfig;
//
//    @Autowired
//    KafkaTemplate<String, String> kafkaTemplate;
//
//    // Batch listener entry point for the "test-syslog" topic.
//    // NOTE(review): the offset is acknowledged BEFORE processing, so this is
//    // at-most-once delivery — records are lost if sendMessage() later fails.
//    // NOTE(review): raw type ConsumerRecord should be parameterized, e.g.
//    // ConsumerRecord<String, String>, to avoid unchecked access to value().
//    @KafkaListener(topics = "test-syslog")
//    public void consumerTopic(List<ConsumerRecord> records, Acknowledgment ack) {
//        ack.acknowledge();
//        // NOTE(review): wrapping the lambda in a Thread object is wasteful — the
//        // Thread is never started, only used as a Runnable by the executor.
//        // Submitting the lambda directly would be equivalent and clearer.
//        Thread thread = new Thread(() -> {
//            sendMessage(records);
//        });
//        executorConfig.getExecutor().execute(thread);// hand off to the thread pool
//    }
//
//    // Converts the batch to AuditLogs and publishes them, logging timing info.
//    // NOTE(review): records.get(0).partition() assumes a non-empty batch — this
//    // throws IndexOutOfBoundsException if records is empty; guard before indexing.
//    public void sendMessage(List<ConsumerRecord> records) {
//        log.info("\n" + "-------开始数据结束,处理的条数：" + records.size() + "-------" + "\n");
//        Long startTime = System.currentTimeMillis();
//        List<AuditLog> auditLogList;
//        // convert and parse the records
//        auditLogList = this.dohandle(records);
//        doSend(auditLogList);
//        log.info("----当前线程：" + Thread.currentThread().getName() + "----tsa-syslog分区:" + records.get(0).partition() + "-------总共耗时:" + (System.currentTimeMillis() - startTime) + "ms-----" + ",处理成功：" + auditLogList.size() + "\n");
//        log.info("\n" + "-------处理数据结束-------" + "\n");
//    }
//
//
//    // Maps each ConsumerRecord to an EventLog, then wraps each EventLog in an AuditLog.
//    private List<AuditLog> dohandle(List<ConsumerRecord> records) {
//        List<AuditLog> logList = new ArrayList<>();
//        List<EventLog> eventLogList = new ArrayList<>();
//        EventLog eventLog;
//        for (ConsumerRecord record : records) {
//            // convert message to an EventLog object
//            eventLog = setNecessEventLogData(record);
//            eventLogList.add(eventLog);
//        }
//
//        AuditLog auditLog;
//        if (!CollectionUtils.isEmpty(eventLogList)) {
//            // add the grouped ip data for persistence
//            for (EventLog eve : eventLogList) {
//                auditLog = new AuditLog(eve);
//                logList.add(auditLog);
//            }
//        }
//
//        return logList;
//    }
//
//    // convert message to an EventLog object
//    // NOTE(review): jsonObject.get("eventMessage").toString() throws NPE when the
//    // key is absent — unlike "fields" and "collectionType", it has no null guard.
//    // NOTE(review): record.value() is assumed to be a JSON string; parseObject
//    // throws on malformed input and nothing here catches it.
//    private EventLog setNecessEventLogData(ConsumerRecord record) {
//        JSONObject jsonObject = JSONObject.parseObject(record.value().toString());
//        EventLog eventLog = new EventLog();
//        eventLog.setEventAcceptTime(LocalDateTimeUtils.convertLDTToDate(LocalDateTime.now())); // receive time
//        if (jsonObject.get("fields") != null) {
//            String str = jsonObject.getJSONObject("fields").getString("host_ip");
//            eventLog.setHost(str);
//            eventLog.setDeviceAddress(str);
//        }
//        if (jsonObject.containsKey("collectionType")) {
//            eventLog.setCollectionType(jsonObject.get("collectionType").toString());
//        } else if (jsonObject.containsKey("type")) {
//            eventLog.setCollectionType(jsonObject.get("type").toString());
//        }
//
//        eventLog.setEventMessage(jsonObject.get("eventMessage").toString());
//        return eventLog;
//    }
//
//    // Publishes each AuditLog as JSON to the "test-warn" topic.
//    private void doSend(List<AuditLog> auditLogList) {
//        auditLogList.forEach(
//                auditLog -> kafkaTemplate.send("test-warn", JSONObject.toJSONString(auditLog))
//        );
//    }
//
//
//}
