package com.crux.kafka2ch;

import com.crux.kafka2ch.dto.AccessLogDto;
import com.crux.kafka2ch.service.AccessLogService;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Configuration
@Slf4j
public class KafkaConfig implements ApplicationContextAware {
    @Value("${kafka.bootstrap-servers}")
    private String servers;

    @Value("${kafka.topics}")
    private String topics;

    @Autowired
    private AccessLogService accessLogService;

    @Lazy(value = false)
    @PostConstruct
    public void init() {
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        Properties props = new Properties();
        // 定义kakfa 服务的地址，不需要将所有broker指定上
        props.put("bootstrap.servers", servers);
        // 制定consumer group
        props.put("group.id", "clickhouse-accesslog");
        // 是否自动确认offset
        props.put("enable.auto.commit", "true");
        // 自动确认offset的时间间隔
        props.put("auto.commit.interval.ms", "1000");
        // key的序列化类
        props.put("key.deserializer", StringDeserializer.class.getName());
        // value的序列化类
        props.put("value.deserializer", StringDeserializer.class.getName());
        // 定义consumer
        KafkaConsumer<String, String> consumer = new KafkaConsumer(props);

        // 消费者订阅的topic, 可同时订阅多个
        consumer.subscribe(Arrays.asList(topics.split(",")));


        ObjectMapper mapper = new ObjectMapper();

        //       b.featuresToDisable(new Object[]{SerializationFeature.WRITE_DATES_AS_TIMESTAMPS})
        //       .featuresToDisable(new Object[]{DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES})
        //       .featuresToDisable(new Object[]{DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES})
        //       .featuresToDisable(new Object[]{SerializationFeature.FAIL_ON_EMPTY_BEANS})
        mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
                .disable(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES)
                .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
                .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);


        executorService.submit(() -> {
            while (true) {
                // 读取数据，读取超时时间为100ms
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100L));
                for (ConsumerRecord<String, String> record : records) {
                    String value = record.value();
                    log.trace("主题：" + record.topic() + ",分区：" + record.partition() + ",offset:" + record.offset() + ",value:" + value);
                    if (value != null) {
                        try {
                            //mapper
                            AccessLogDto accessLogDto = mapper.readValue(value, AccessLogDto.class);
                            if (applicationContext != null) {
                                //AccessLogEvent accessLogEvent = new AccessLogEvent(this, accessLogDto);
                                //applicationContext.publishEvent(accessLogEvent);
                                accessLogService.consumer(accessLogDto);
                            }
                        } catch (Exception e) {
                            log.error(e.getMessage(), e);
                        }

                    }
                }
            }
        });
    }

    private ApplicationContext applicationContext;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }
}
