package cn.com.nes.site.kafka;

import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson2.JSON;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;

import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Future;

/**
 * Thin wrapper around a single {@link KafkaProducer} instance.
 *
 * <p>Supports two access modes selected by the {@code access.mode} property
 * ({@code vpc-ssl} for SASL_SSL, anything else for plain VPC access).
 * Records can either be sent immediately ({@link #sendto}) or buffered in
 * {@link #queues} via {@link #send} and flushed in batches by {@link #handler()}.
 *
 * <p>The {@code KafkaProducer} itself is thread safe; one instance per process
 * is normally sufficient.
 */
public class KafkaTemplate {

    private static final Logger logger = LoggerFactory.getLogger(KafkaTemplate.class);

    /** Maximum number of queued records drained per {@link #handler()} call. */
    private static final int BATCH_SIZE = 128;

    private final KafkaProducer<String, String> producer;

    /** Lazily created singleton; volatile + synchronized getInstance() makes creation thread safe. */
    private static volatile KafkaTemplate instance;

    /** Records buffered by {@link #send} until {@link #handler()} flushes them. */
    public ConcurrentLinkedQueue<KafkaValue> queues = new ConcurrentLinkedQueue<>();

    /** Default topic; overwritten from the {@code topic} property in {@link #getProperties()}. */
    private String topic = "iotdata";

    /**
     * Builds the producer from {@code kafka.properties}. Note: prefer
     * {@link #getInstance()}; constructing multiple templates creates
     * multiple producers.
     */
    public KafkaTemplate() {
        this.producer = new KafkaProducer<>(getProperties());
    }

    /**
     * Loads {@code kafka.properties}, captures the configured topic as a side
     * effect, and returns producer settings for the configured access mode.
     *
     * @return producer configuration for either SASL_SSL or plain VPC access
     */
    public Properties getProperties() {
        Properties kafkaProperties = JavaKafkaConfigurer.getKafkaProperties();
        topic = kafkaProperties.getProperty("topic");
        String mode = kafkaProperties.getProperty("access.mode");
        if ("vpc-ssl".equals(mode)) {
            return getVpcSslProperties();
        } else {
            return getVpcProperties();
        }
    }

    /**
     * Producer settings for SASL_SSL access: endpoint, truststore, SASL
     * credentials (SCRAM by default, PLAIN when configured), serializers and
     * retry behaviour.
     */
    private Properties getVpcSslProperties() {
        Properties kafkaProperties = JavaKafkaConfigurer.getKafkaProperties();
        Properties props = new Properties();
        // Bootstrap endpoint for the topic (obtained from the console).
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getProperty("bootstrap.servers"));
        // SSL root-certificate truststore; like the SASL file, it must not be packed inside the jar.
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, kafkaProperties.getProperty("ssl.truststore.location"));
        // Truststore password — fixed value, keep unchanged.
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "KafkaOnsClient");
        // Only SASL_SSL is currently supported for this access mode.
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");

        // SASL credentials: build the JAAS config inline when both username and password are present.
        String saslMechanism = kafkaProperties.getProperty("sasl.mechanism");
        String username = kafkaProperties.getProperty("sasl.username");
        String password = kafkaProperties.getProperty("sasl.password");
        if (!JavaKafkaConfigurer.isEmpty(username)
                && !JavaKafkaConfigurer.isEmpty(password)) {
            String prefix = "org.apache.kafka.common.security.scram.ScramLoginModule";
            if ("PLAIN".equalsIgnoreCase(saslMechanism)) {
                prefix = "org.apache.kafka.common.security.plain.PlainLoginModule";
            }
            String jaasConfig = String.format("%s required username=\"%s\" password=\"%s\";", prefix, username, password);
            props.put(SaslConfigs.SASL_JAAS_CONFIG, jaasConfig);
        }

        // Properties extends Hashtable: putting a null value throws NPE, so only
        // set the mechanism when it is actually configured.
        if (saslMechanism != null) {
            props.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
        } else {
            logger.warn("sasl.mechanism is not configured; SASL_SSL access will likely fail");
        }
        // Key/value serialization.
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        // Max time send()/metadata fetch may block.
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 1 * 1000);
        // Client-side retry count.
        props.put(ProducerConfig.RETRIES_CONFIG, 5);
        // Reconnect back-off between retries.
        props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, 300);
        // Disable hostname verification.
        props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
        // Idempotence off (not supported/required by this endpoint).
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, false);

        return props;
    }

    /**
     * Producer settings for plain VPC access: endpoint, serializers and retry
     * behaviour; no SSL/SASL required.
     */
    private Properties getVpcProperties() {
        Properties kafkaProperties = JavaKafkaConfigurer.getKafkaProperties();

        Properties props = new Properties();
        // Bootstrap endpoint for the topic (obtained from the console).
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getProperty("bootstrap.servers"));

        // Key/value serialization.
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        // Max time send()/metadata fetch may block.
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 30 * 1000);
        // Client-side retry count.
        props.put(ProducerConfig.RETRIES_CONFIG, 5);
        // Reconnect back-off between retries.
        props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, 3000);
        // Idempotence off (not supported/required by this endpoint).
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, false);
        return props;
    }

    /**
     * Thread-safe lazy singleton accessor. The original unsynchronized check
     * could construct two producers under concurrent first access.
     */
    public static synchronized KafkaTemplate getInstance() {
        if (instance == null) {
            instance = new KafkaTemplate();
        }
        return instance;
    }

    /**
     * Serializes a DB-change payload of type {@code KafkaDbData.MM} and sends
     * it synchronously to the configured default topic.
     *
     * @param key record key; no-op when null or empty
     * @param tag JSON payload to embed in the record
     */
    public void sendMm(String key, String tag) {
        // Equivalent to the deprecated StringUtils.isEmpty(key).
        if (!StringUtils.hasLength(key)) {
            return;
        }
        KafkaDbData kafkaDbData = new KafkaDbData();
        kafkaDbData.setType(KafkaDbData.MM);
        kafkaDbData.setJsonValue(tag);
        sendto(topic, key, JSON.toJSONString(kafkaDbData));
    }

    /**
     * Buffers a record for later batch delivery by {@link #handler()}.
     */
    public void send(String topic, String key, String value) {
        KafkaValue kafkaValue = new KafkaValue();
        kafkaValue.setTopic(topic);
        kafkaValue.setKey(key);
        kafkaValue.setValue(value);
        queues.add(kafkaValue);
    }

    /**
     * Sends one record immediately (fire-and-forget). Failures are logged with
     * full context and cause instead of being printed to stderr.
     */
    public void sendto(String topic, String key, String value) {
        try {
            producer.send(new ProducerRecord<>(topic, key, value));
        } catch (Exception e) {
            logger.error("Produce failure, topic:{}, key:{}", topic, key, e);
        }
    }

    /**
     * Drains up to {@link #BATCH_SIZE} buffered records, sends them, flushes
     * the producer and waits for each acknowledgement. Collecting futures in a
     * batch speeds things up, but the batch must not grow too large.
     */
    public void handler() {
        try {
            List<Future<RecordMetadata>> futures = new ArrayList<>(BATCH_SIZE);
            for (int i = 0; i < BATCH_SIZE; i++) {
                KafkaValue kafkaValue = queues.poll();
                if (kafkaValue == null) {
                    break; // queue drained — no point polling the remainder of the batch
                }
                logger.info("发送数据，topic:{},key:{},value{}", kafkaValue.getTopic(), kafkaValue.getKey(), kafkaValue.getValue());
                ProducerRecord<String, String> kafkaMessage =
                        new ProducerRecord<>(kafkaValue.getTopic(), kafkaValue.getKey(), kafkaValue.getValue());
                futures.add(producer.send(kafkaMessage));
            }
            producer.flush();
            for (Future<RecordMetadata> future : futures) {
                // Block on each future to surface per-record delivery results.
                try {
                    RecordMetadata recordMetadata = future.get();
                    // Success is not an error — log at debug level.
                    logger.debug("Produce ok: {}", recordMetadata);
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag so callers can observe it.
                    Thread.currentThread().interrupt();
                    logger.error("Produce interrupted", ie);
                } catch (Exception ex) {
                    logger.error("Produce failure", ex);
                }
            }
        } catch (Exception e) {
            // Delivery still failed after the client's internal retries;
            // the business layer must be prepared for this class of error.
            logger.error("error occurred", e);
        }
    }


    public static void main(String[] args) {
        KafkaTemplate.getInstance().sendto("formal-iot-data", "test", "test");
    }

}
