package com.atzc.source;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;

import java.util.Properties;


/**
 * Utility for building Flink Kafka consumers pre-configured for a SASL_SSL-secured
 * Kafka cluster (Aliyun-hosted, public-network access).
 *
 * <p>All connection settings are assembled once in the static initializer and shared
 * by every consumer returned from {@link #getFlinkKafkaConsumer010(String)}.
 */
public class KafkaSourceUtils {

    // NOTE(review): never assigned anywhere in this file — looks dead. Package-visible,
    // so confirm no other class references it before removing.
    static FlinkKafkaConsumer010<String> flinkKafkaConsumer010 = null;
    static Properties properties = new Properties();

    // Kafka connection/consumer configuration, built once at class-load time.
    static {

        // Broker endpoints — obtain the per-topic endpoint from the console.
        // Was ProducerConfig.BOOTSTRAP_SERVERS_CONFIG; CommonClientConfigs is the
        // canonical home for this shared key in a consumer config (same literal
        // value "bootstrap.servers", so runtime behavior is unchanged).
        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "47.99.72.109:9093,47.96.95.229:9093,47.98.190.254:9093");
        // Consumer group used for testing.
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "bi-transfer-test");
        // Path to the SSL root-certificate truststore. Like the SASL config file, it
        // must live on the filesystem and cannot be packaged inside the jar.
//        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/home/hadoop/kafka.client.truststore.jks");
        // Local path used while testing.
        // NOTE(review): Windows-only absolute path hard-coded in source — should come
        // from external configuration before this runs anywhere but a dev machine.
        properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "D:\\MyWork\\kafka.client.truststore.jks");
        // Truststore password — fixed value required by the provider; do not change.
        properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "KafkaOnsClient");
        // Access protocol; only SASL_SSL is currently supported by the provider.
        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        // SASL authentication mechanism; keep as-is.
        properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        // NOTE(review): the original comment described this as the max interval between
        // polls, but that is max.poll.interval.ms — session.timeout.ms governs how long
        // the broker waits for heartbeats before evicting the consumer. Tune per actual
        // fetch volume and client version; default here is 30s.
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 30000);
        // Per-fetch size caps; these have a large impact when accessing over the
        // public internet.
        properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 32000);
        properties.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 32000);
        // Maximum records per poll. Do not set this too high: if a batch cannot be
        // consumed before the next poll deadline, a rebalance is triggered and the
        // consumer stalls.
        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 30);
        // Message deserialization.
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        // Production consumer group (apply for it in the console first). Instances in
        // the same group load-balance message consumption.
//        props.put(ConsumerConfig.GROUP_ID_CONFIG, "bi-transfer");

        // Disable hostname verification (empty algorithm).
        properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");

        // Offset reset policy.
        //props.put("auto.offset.reset", "earliest");

        // SECURITY(review): credentials are hard-coded in source — move to an external
        // secret store / environment configuration and rotate this password.
        // NOTE(review): SASL_MECHANISM above is "PLAIN" but the login module here is
        // ScramLoginModule; PLAIN normally pairs with PlainLoginModule. Both modules
        // expose username/password so this may happen to work, but confirm against the
        // broker's actual mechanism before changing either side.
        // Was the raw literal "sasl.jaas.config"; SaslConfigs.SASL_JAAS_CONFIG is the
        // same key and keeps this file consistent with the other config constants.
        properties.put(SaslConfigs.SASL_JAAS_CONFIG,
                "org.apache.kafka.common.security.scram.ScramLoginModule required username='aliyunkafka' password='8BPkZYAWXjwtachP';");
    }

    /**
     * Builds a Flink Kafka 0.10 string consumer for the given topic, using the shared
     * SASL_SSL connection properties configured above.
     *
     * @param topic the Kafka topic to subscribe to
     * @return a new {@code FlinkKafkaConsumer010} deserializing records as UTF-8 strings
     */
    public static FlinkKafkaConsumer010<String> getFlinkKafkaConsumer010(String topic){
        return new FlinkKafkaConsumer010<String>(topic,
                new SimpleStringSchema(),
                properties);
    }


}
