package com.wk.springbootlearn.kafka;

import com.google.gson.Gson;
import com.wk.springbootlearn.common.model.CommonLog;
import com.wk.springbootlearn.mapper.impl.LogMapperImpl;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * Kafka consumer component.
 *
 * <p>The actual {@code @KafkaListener} is currently disabled (commented out
 * below); this bean presently only wires the log mapper so the listener can be
 * re-enabled without further changes.
 */
@Component
public class KafkaConsumer {

    // Constructor injection instead of field injection: the dependency can be
    // final, and the class is constructible in plain unit tests without a
    // Spring context.
    private final LogMapperImpl logMapper;

    @Autowired
    public KafkaConsumer(LogMapperImpl logMapper) {
        this.logMapper = logMapper;
    }

    // NOTE(review): disabled listener, kept verbatim for reference. It consumed
    // records from topic "hello" (consumer group "001"), printed a receipt line
    // (the Chinese literal reads "received kafka message" with partition/offset/
    // value), deserialized the JSON value into a CommonLog via Gson, persisted
    // it through logMapper (the inline Chinese comment reads "record into
    // mongodb"), and then manually acknowledged the offset. Why it was disabled
    // is not recorded — confirm before deleting or re-enabling.
//    @KafkaListener(topics = "hello", groupId = "001")
//    public void consumer(ConsumerRecord<String, String> record, Acknowledgment ack){
//        String value = record.value();
//        System.out.println(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) + "接收到kafka消息，partition：" + record.partition() + ",offset：" + record.offset() + ",value：" + value);
////        TimeUnit.SECONDS.sleep(1);
//        // 记录到mongodb中
//        logMapper.addOneLog(new Gson().fromJson(value, CommonLog.class));
//        ack.acknowledge();
//
//    }
}
