package com.note.kafka.consumer;


import com.alibaba.fastjson.JSONObject;
import com.note.kafka.service.KafkaConsumerService;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * <p>【描述】：消费者</p>
 * <p>【作者】: BeyMax</p>
 * <p>【日期】: 2019-08-29</p>
 **/
@Component
public class Listener {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    /**
     * Formats Kafka record timestamps as "yyyy-MM-dd HH:mm:ss" in the system zone.
     * DateTimeFormatter is immutable and thread-safe, so it is cached once here
     * instead of allocating a SimpleDateFormat (not thread-safe) per message.
     */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());

    @Autowired
    KafkaConsumerService kafkaConsumerService;

    /**
     * Listens for "collection task configuration" records (collector deployment)
     * and persists each record's JSON payload to the PostgreSQL config table.
     *
     * <p>Currently disabled: the {@code @KafkaListener} annotation is commented out,
     * so this method is not wired to a topic.</p>
     *
     * @param record the consumed Kafka record; the key may be {@code null}
     *               (Kafka allows un-keyed messages), and a {@code null} value
     *               (tombstone) is skipped rather than dereferenced
     */
//    @KafkaListener(topics = {"${kafka.consumer.topic1}"})
    public void listenTaskConfig(ConsumerRecord<?, ?> record) {
        long offset = record.offset();
        // Kafka keys are nullable; the previous record.key().toString() threw an NPE
        // on un-keyed messages.
        Object rawKey = record.key();
        String key = rawKey == null ? null : rawKey.toString();
        Object rawValue = record.value();
        if (rawValue == null) {
            // A null value is a tombstone record — there is nothing to parse or persist.
            logger.warn("=== kafka record skipped (null value), offset:{}", offset);
            return;
        }
        String value = rawValue.toString();
        String timestamp = TIMESTAMP_FORMAT.format(Instant.ofEpochMilli(record.timestamp()));
        logger.info("=== kafka的key: {} offset:{}   value: {},timestamp:{}", key, offset, value, timestamp);
        // Write the collection configuration into the PostgreSQL config table.
        kafkaConsumerService.getTaskConfigKafka(JSONObject.parseObject(value));
    }

    /**
     * Consumes LinkedIn task records from the topic configured by
     * {@code kafka.consumer.topic7} and hands the parsed JSON payload to the service.
     *
     * @param record the consumed Kafka record; a {@code null} value (tombstone)
     *               is skipped rather than dereferenced
     */
    @KafkaListener(topics = {"${kafka.consumer.topic7}"})
    public void listenLinkedInTask(ConsumerRecord<?, ?> record) {
        long offset = record.offset();
        Object rawValue = record.value();
        if (rawValue == null) {
            // Tombstone record — nothing to process.
            logger.warn("=== kafka record skipped (null value), offset:{}", offset);
            return;
        }
        String value = rawValue.toString();
        logger.info("=== kafka的offset:{}   value: {}", offset, value);
        kafkaConsumerService.listenLinkedInTask(JSONObject.parseObject(value));
    }


}
