package avicit.bdp.dms.prm.Integration;

import avicit.bdp.common.utils.ConfigUtils;
import avicit.bdp.dms.prm.utils.Stopper;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import java.io.Closeable;
import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Message subscription service: runs an async loop that, while enabled via
 * configuration, consumes records from a Kafka topic and hands each payload
 * to {@link HandleManager}.
 *
 * @author xugb
 * @date 2021/11/4 17:50
 */
@Component
public class SubscribeServer implements Closeable {

    private static final Logger log = LoggerFactory.getLogger(SubscribeServer.class);

    @Autowired
    private HandleManager handleManager;

    /**
     * Builds a {@code String/String} Kafka consumer from external configuration.
     *
     * @return a new, not-yet-subscribed consumer; the caller owns it and must close it
     */
    public KafkaConsumer<String, String> createKafkaStreamClient() {
        String bootstrapServers = ConfigUtils.getInstance().getString("integration.bootstrapServers");
        // NOTE(review): default group id "integeration" looks like a typo of "integration",
        // but changing it would move consumers to a new group — left as-is.
        String groupId = ConfigUtils.getInstance().getString("subscribe.groupId", "integeration");

        Properties properties = new Properties();
        // Kafka broker addresses; multiple addresses are comma-separated.
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        // NOTE(review): "zookeeper.session.timeout.ms" is not a KafkaConsumer setting — the
        // Java consumer never talks to ZooKeeper, so this entry is ignored. Kept for behavior
        // parity; the intended knob is probably "session.timeout.ms" — confirm before changing.
        properties.put("zookeeper.session.timeout.ms", "15000");

        return new KafkaConsumer<>(properties);
    }

    /**
     * Async worker loop. Every 2 seconds it re-reads {@code subscribe.isEnabled}; while
     * enabled it subscribes to the configured topic and polls records, passing each
     * record value to {@link HandleManager#handleMessage}. Per-record handler failures
     * are logged and do not stop consumption. The loop exits when {@link Stopper}
     * signals shutdown or the thread is interrupted.
     */
    @Async("taskExecutor")
    public void initKafkaSubscribeServer() {
        while (Stopper.isRunning()) {
            try {
                boolean isEnabled = ConfigUtils.getInstance().getBoolean("subscribe.isEnabled", false);
                if (isEnabled) {
                    String topic = ConfigUtils.getInstance().getString("subscribe.topic", "kwaidoo.integration");
                    // try-with-resources guarantees the consumer is closed on any exit path,
                    // replacing the original manual null-check/close in a finally block.
                    try (KafkaConsumer<String, String> kafkaConsumer = createKafkaStreamClient()) {
                        kafkaConsumer.subscribe(Collections.singletonList(topic));
                        log.info(">>>>>>>>>>>>>>>数据集成消息订阅成功 <<<<<<<<<<<<<");
                        while (Stopper.isRunning() && isEnabled) {
                            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
                            for (ConsumerRecord<String, String> record : records) {
                                try {
                                    handleManager.handleMessage(record.value());
                                } catch (Exception e) {
                                    // One bad message must not kill the consumer loop.
                                    log.error(e.getMessage(), e);
                                }
                            }
                            // Re-check the switch each poll cycle so the feature can be
                            // turned off at runtime without restarting the service.
                            isEnabled = ConfigUtils.getInstance().getBoolean("subscribe.isEnabled", false);
                        }
                        kafkaConsumer.unsubscribe();
                    } catch (Exception e) {
                        log.error(e.getMessage(), e);
                    } finally {
                        log.info(">>>>>>>>>>>>>>>数据集成消息 取消订阅 <<<<<<<<<<<<<");
                    }
                }

                // Idle delay between enable-flag checks (and between reconnect attempts).
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                // Fix: the original swallowed interruption in catch(Exception), making the
                // worker uninterruptible. Restore the interrupt flag and stop the loop.
                Thread.currentThread().interrupt();
                break;
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Signals the worker loop (and anything else watching {@link Stopper}) to shut down.
     *
     * @throws IOException declared by {@link Closeable}; not thrown here
     */
    @Override
    public void close() throws IOException {
        Stopper.stop();
    }
}
