package com.urfresh.sp.flume.receive.service;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.Arrays;

@Service
public class TrackListener {

    /**
     * Listens to three Kafka tracking topics (app data, sys data, app list)
     * on a dedicated background thread started at bean initialization, and
     * hands each topic's records to {@link AppEventTrackInfo} for persistence.
     *
     * <p>Thread-safety note: {@code KafkaConsumer} is NOT thread-safe, so the
     * consumer is subscribed, polled and closed exclusively on the polling
     * thread; shutdown is signalled via the {@code running} flag instead of
     * closing the consumer from the container thread.
     */

    private static final Logger logger = LoggerFactory.getLogger(TrackListener.class);

    // Topic names, injected from configuration.
    @Value("${kafka.track.app.data}")
    String appData;

    @Value("${kafka.track.sys.data}")
    String sysData;

    @Value("${kafka.track.app.applist}")
    String applistData;

    @Autowired
    @Qualifier("appEventTrack")
    KafkaConsumer<String, String> appEventTrack;

    @Autowired
    AppEventTrackInfo appEventTrackInfo;

    // Checked by the polling loop; flipped to false on container shutdown.
    // poll(100) returns within ~100 ms, so the thread exits promptly.
    private volatile boolean running = true;

    // NOTE: method name keeps the original spelling ("destory") so any
    // explicit callers remain unaffected; the container invokes it via
    // the @PreDestroy annotation regardless of the name.
    @PreDestroy
    public void destory() {
        // Do NOT call appEventTrack.close() here: this runs on the container
        // thread while the polling thread may be inside poll(), and
        // KafkaConsumer forbids concurrent access (would throw
        // ConcurrentModificationException). Just signal the loop to stop;
        // the polling thread closes the consumer in its finally block.
        running = false;
    }

    @PostConstruct
    public void init() {
        logger.info("启动线程监听kafka数据");
        // Start a dedicated thread to poll Kafka; named for easier thread dumps.
        new Thread(new Runnable() {
            public void run() {
                // Subscribe to the three tracking topics.
                appEventTrack.subscribe(Arrays.asList(appData, sysData, applistData));
                try {
                    while (running) {
                        try {
                            // Fetch the next batch (up to 100 ms wait).
                            final ConsumerRecords<String, String> records = appEventTrack.poll(100);
                            // Route each topic's records to its handler.
                            appEventTrackInfo.saveAppDataInfo(records.records(appData));
                            appEventTrackInfo.saveAppSysInfo(records.records(sysData));
                            appEventTrackInfo.saveAppListDataInfo(records.records(applistData));
                        } catch (Exception e) {
                            // Previously any exception here silently killed the
                            // listener thread; log and keep consuming instead.
                            logger.error("error while polling/handling kafka track records", e);
                        }
                    }
                } finally {
                    // Close on the same thread that polls (KafkaConsumer is
                    // single-threaded by contract).
                    appEventTrack.close();
                }
            }
        }, "track-listener-poller").start();
    }


}
