package com.tron.sync.config;

import com.tron.sync.controller.KafkaMessageHandler;
import com.tron.sync.service.ElkService;
import com.tron.sync.service.SyndaOrgService;
import com.tron.sync.service.SyndaUserService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.Resource;
import java.util.Properties;

/**
 * Tencent Cloud TBDS Kafka configuration.
 *
 * <p>Builds a {@link KafkaProducer} and a {@link KafkaConsumer} from the
 * {@code tbds.kafka.*} properties, optionally attaching TBDS SASL
 * authentication, and starts a {@link KafkaMessageHandler} that consumes the
 * configured topic on a background thread.
 *
 * @since 2021-05-28
 * @author 胡祖有
 */
@Slf4j
@Configuration
@ConditionalOnProperty(name = "tbds.kafka.enable",havingValue = "kafka")
public class TBDSKafkaConfig {

    /** Comma-separated Kafka bootstrap servers. */
    @Value("${tbds.kafka.servers}")
    private String servers;

    /** Consumer group id. */
    @Value("${tbds.kafka.group}")
    private String group;

    /** TBDS SASL secure id; only read when {@link #enable} is {@code true}. */
    @Value("${tbds.kafka.auth.secureId}")
    private String secureId;

    /** TBDS SASL secure key; only read when {@link #enable} is {@code true}. */
    @Value("${tbds.kafka.auth.secureKey}")
    private String secureKey;

    /** Whether the TBDS SASL authentication parameters are applied. */
    @Value("${tbds.kafka.auth.enabled}")
    private boolean enable;

    /** Producer send retries ({@code ProducerConfig.RETRIES_CONFIG}). */
    @Value("${tbds.kafka.retries}")
    private Integer retries;

    /** Producer buffer memory in bytes ({@code ProducerConfig.BUFFER_MEMORY_CONFIG}). */
    @Value("${tbds.kafka.buffer-memory}")
    private long memory;

    /** Topic the background message handler consumes. */
    @Value("${tbds.kafka.topic}")
    private String topic;

    @Autowired
    SyndaOrgService syndaOrgService;

    @Autowired
    SyndaUserService syndaUserService;

    @Autowired
    ElkService elkService;

    /**
     * Creates the shared string-keyed/string-valued Kafka producer.
     *
     * <p>Spring infers {@code close()} as the destroy method (the producer is
     * {@code Closeable}), so buffered records are flushed at shutdown.
     *
     * @return a configured {@link KafkaProducer}
     */
    @Bean
    public KafkaProducer<String,String> kafkaProducer(){
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        // "all": wait for the full ISR to acknowledge each record.
        props.put(ProducerConfig.ACKS_CONFIG,"all");
        props.put(ProducerConfig.RETRIES_CONFIG,retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG,16384);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG,memory);
        if (enable){
            confForAuthentication(props, secureId, secureKey);
        }
        return new KafkaProducer<>(props, new StringSerializer(), new StringSerializer());
    }

    /**
     * Adds the TBDS SASL authentication parameters to the given client config.
     *
     * @param props     producer/consumer configuration to augment
     * @param secureId  TBDS secure id
     * @param secureKey TBDS secure key
     */
    private void confForAuthentication(Properties props,String secureId,String secureKey) {
        props.put(TbdsAuthenticationUtil.KAFKA_SECURITY_PROTOCOL, TbdsAuthenticationUtil.KAFKA_SECURITY_PROTOCOL_AVLUE);
        props.put(TbdsAuthenticationUtil.KAFKA_SASL_MECHANISM, TbdsAuthenticationUtil.KAFKA_SASL_MECHANISM_VALUE);
        props.put(TbdsAuthenticationUtil.KAFKA_SASL_TBDS_SECURE_ID,secureId);
        props.put(TbdsAuthenticationUtil.KAFKA_SASL_TBDS_SECURE_KEY,secureKey);
    }

    /**
     * Creates the consumer and immediately hands it to a
     * {@link KafkaMessageHandler}, which starts polling on a background thread.
     *
     * <p>{@code destroyMethod = ""} suppresses Spring's inferred
     * {@code close()} call: {@code KafkaConsumer} is not safe for
     * multi-threaded access, and closing it from the container thread while
     * the handler thread is polling would fail with
     * {@code ConcurrentModificationException}. The handler is therefore
     * assumed to own the consumer's shutdown — NOTE(review): confirm that
     * {@code KafkaMessageHandler} closes the consumer when it stops.
     *
     * @return the consumer driving the message handler; it must not be polled
     *         from any other thread
     */
    @Bean(destroyMethod = "")
    public KafkaConsumer<String,String>  kafkaConsumer() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, group);
        // Offsets auto-commit every second; a handler failure after a commit
        // means those records are not redelivered.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
        if (enable){
            confForAuthentication(props, secureId, secureKey);
        }
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props,new StringDeserializer(),new StringDeserializer());
        KafkaMessageHandler kafkaMessageHandler = new KafkaMessageHandler(consumer,topic,syndaOrgService,syndaUserService,elkService);
        kafkaMessageHandler.start();
        return consumer;
    }
}
