package com.hps.kafka.component;

import com.hps.kafka.entity.KafkaMsg;
import com.hps.kafka.service.IKafkaMsgService;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Kafka listener that buffers records from the {@code test} topic and persists
 * them to the database in fixed-size batches via {@link IKafkaMsgService}.
 *
 * <p>NOTE(review): records buffered but not yet flushed are lost on shutdown;
 * consider a scheduled/periodic flush or a shutdown hook if that matters here.
 *
 * @author heps
 * @date 2019/4/8 17:42
 */
@Component
public class KafkaConsumer {

    private static final Logger logger = LoggerFactory.getLogger(KafkaConsumer.class);

    /** Number of buffered messages that triggers a batch save. */
    private static final int BATCH_SIZE = 5000;

    private final IKafkaMsgService kafkaMsgService;

    /**
     * In-memory buffer of received messages awaiting the next batch save.
     * Instance-scoped (not static) and only touched inside the synchronized
     * listener, so concurrent listener containers cannot corrupt it.
     */
    private final List<KafkaMsg> kafkaMsgList = new ArrayList<>(BATCH_SIZE);

    public KafkaConsumer(IKafkaMsgService kafkaMsgService) {
        this.kafkaMsgService = kafkaMsgService;
    }

    /**
     * Receives a single record from the {@code test} topic, maps it to a
     * {@link KafkaMsg}, and flushes the buffer to the database once
     * {@value #BATCH_SIZE} records have accumulated.
     *
     * @param record the consumed record; its value is assumed to be a String
     *               (the listener is configured with a String deserializer —
     *               TODO confirm, the cast will throw ClassCastException otherwise)
     */
    @KafkaListener(topics = {"test"})
    public synchronized void listen(ConsumerRecord<?, ?> record) {
        // Parameterized logging instead of System.out — respects log config and
        // avoids eager string concatenation.
        logger.info("partition: {}, offset: {} -> receive msg: {}",
                record.partition(), record.offset(), record.value());

        KafkaMsg kafkaMsg = new KafkaMsg();
        kafkaMsg.setValue((String) record.value());
        kafkaMsg.setMsgPartition(record.partition());
        kafkaMsg.setMsgTimestamp(record.timestamp());
        kafkaMsg.setTopic(record.topic());
        kafkaMsg.setOffset(record.offset());
        kafkaMsgList.add(kafkaMsg);

        // ">=" rather than "==": if the buffer ever overshoots the threshold,
        // an equality check would never fire again and the buffer would grow forever.
        if (kafkaMsgList.size() >= BATCH_SIZE) {
            kafkaMsgService.saveBatch(kafkaMsgList);
            kafkaMsgList.clear();
        }
    }
}
