package com.tzg157.fitness.task;

import cn.hutool.json.JSONUtil;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.Resource;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;


@Service
public class ScheduleFetchMap2KafkaTask {

    /** Kafka topic and record key used when publishing the queue snapshot. */
    private static final String TOPIC = "adsTopic";
    private static final String RECORD_KEY = "adsKey";

    // Shared map populated elsewhere in the application; drained here every 5 seconds.
    // NOTE(review): assumes producers of this map tolerate entries being removed at any time.
    @Resource
    private ConcurrentHashMap<String, Object> queueMap;

    private KafkaProducer<String, String> producer;

    /**
     * Creates the Kafka producer once at startup.
     * NOTE(review): broker address is hard-coded — consider moving "localhost:9092"
     * into externalized Spring configuration.
     */
    @PostConstruct
    public void init() {
        Properties producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producer = new KafkaProducer<>(producerProps);
    }

    /**
     * Closes the producer on application shutdown so buffered records are flushed.
     * Fixes a resource leak: the original never closed the producer.
     */
    @PreDestroy
    public void shutdown() {
        if (producer != null) {
            producer.close();
        }
    }

    /**
     * Runs every 5 seconds: drains the shared queue map and publishes its contents
     * as a single JSON record to Kafka.
     *
     * Fixes a lost-update race in the original, which serialized the whole map and
     * then called {@code clear()} — silently discarding any entry inserted between
     * those two steps. Here each key is removed individually into a local snapshot,
     * so entries added mid-drain survive until the next scheduled run.
     */
    @Scheduled(cron = "0/5 * * * * ?") // every 5 seconds
    public void execute() {
        if (queueMap.isEmpty()) {
            return;
        }
        // Drain per key: remove() is atomic on ConcurrentHashMap, so nothing is lost.
        Map<String, Object> snapshot = new HashMap<>();
        for (String key : queueMap.keySet()) {
            Object value = queueMap.remove(key);
            if (value != null) {
                snapshot.put(key, value);
            }
        }
        if (snapshot.isEmpty()) {
            return; // another thread drained concurrently; nothing to publish
        }
        ProducerRecord<String, String> record =
                new ProducerRecord<>(TOPIC, RECORD_KEY, JSONUtil.toJsonStr(snapshot));
        // Async send with a callback so publish failures are not silently swallowed.
        producer.send(record, (metadata, exception) -> {
            if (exception != null) {
                System.err.println("Failed to publish queue snapshot to Kafka: " + exception);
            }
        });
    }
}
