package com.ws.common.util;

import com.ws.common.constant.Constant;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;
import java.util.concurrent.Future;

/**
 * Utility holder for a single shared Kafka {@code Producer<String, String>},
 * configured from {@link Constant} values and used to send JSON string
 * messages.
 *
 * <p>Note (translated from the original author's comment): the Kafka producer
 * cannot create topics from code; topics must be created on the broker with
 * the command-line tools.
 *
 * <p>Thread-safety: {@code KafkaProducer} is documented as thread-safe, so the
 * single shared instance may be used from multiple threads.
 */
public final class KafkaUtils {
    private static final Logger log = LoggerFactory.getLogger(KafkaUtils.class);

    // Shared producer, created exactly once by the static initializer below.
    private static final Producer<String, String> producer;

    // Utility class: not instantiable.
    private KafkaUtils() {
    }

    static {
        Properties props = new Properties();
        // Kafka broker address list.
        props.put("bootstrap.servers", Constant.kafka_producer_bootstrap_servers);
        // Acknowledgement mode — how many replica acks before a send counts as done.
        props.put("acks", Constant.kafka_producer_acks);
        // Number of retries after a failed send.
        props.put("retries", Constant.kafka_producer_retries);
        // FIXME(review): "batch.size" is wired to the *retries* constant — almost
        // certainly a copy-paste bug in the original code. Left as-is because no
        // dedicated batch-size constant is visible from here; confirm against the
        // Constant class and point this at the correct value.
        props.put("batch.size", Constant.kafka_producer_retries);
        // How long the producer waits to accumulate a batch before sending.
        props.put("linger.ms", Constant.kafka_producer_linger_ms);
        // Timeout for broker responses to produce requests.
        props.put("request.timeout.ms", Constant.kafka_producer_request_timeout);
        // Total memory available for buffering unsent records.
        props.put("buffer.memory", Constant.kafka_producer_buffer_memory);
        // Serializers for record keys and values.
        props.put("key.serializer", Constant.kafka_producer_key_serializer);
        props.put("value.serializer", Constant.kafka_producer_value_serializer);
        // NOTE(review): the original code also set "key.deserializer" and
        // "value.deserializer" here. A producer never uses deserializer configs
        // (the client merely warns about the unknown keys), so they were removed.
        // Enable Kerberos (SASL) authentication when the secured mode is on.
        if ("yes".equals(Constant.kafka_client_security_mode)) {
            log.info("huawei server 认证");
            props.put("security.protocol", Constant.kafka_security_protocol);
            props.put("sasl.kerberos.service.name", Constant.kafka_sasl_kerberos_service_name);
            props.put("kerberos.domain.name", Constant.kafka_kerberos_domain_name);
        }
        producer = new KafkaProducer<>(props);
    }

    /**
     * Asynchronously sends a JSON string message to the given topic.
     *
     * <p>Delivery success or failure is only reported through the callback's
     * log output; this method never blocks on the send result, and delivery
     * failures are not propagated to the caller.
     *
     * @param jsonKafkaMessage the payload, already serialized to a JSON string
     *                         (Kafka stores the value as a String here)
     * @param topic            the destination Kafka topic
     */
    public static void sendMsgToKafka(String jsonKafkaMessage, String topic) {
        try {
            // Fire-and-forget: the returned Future is intentionally not awaited.
            producer.send(new ProducerRecord<String, String>(topic, jsonKafkaMessage), new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception e) {
                    if (e == null) {
                        log.info("send success--");
                    } else {
                        // Log at ERROR with the full stack trace attached,
                        // instead of printStackTrace() plus an SLF4J placeholder
                        // that had no argument.
                        log.error("send failed!!!", e);
                    }
                }
            });
        } catch (Exception e) {
            log.error("kafka error!!", e);
        }
    }

    /** Closes the shared producer, flushing any buffered records first. */
    public static void closeKafkaProducer() {
        producer.close();
    }
}

