package com.huawei.dli.flink.sye.utils;

import com.huawei.dli.flink.sye.avro.AvroDerializationSchema;
import com.huawei.dli.flink.sye.source.KafkaKerbosSource;
import com.huawei.dli.flink.sye.source.KafkaNoKerbosSource;
import com.huawei.hwclouds.drs.avro.Record;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;

import java.util.Properties;

/**
 * Factory methods for building Flink Kafka sources against Huawei MRS/MQS clusters.
 *
 * <p>All sources read common consumer settings (group id, auto-commit, offset reset,
 * topic) from the job's {@link ParameterTool}. The Kerberos-secured variants add the
 * SASL_PLAINTEXT security configuration required by the FusionInsight Kafka broker.
 */
public class KafkaUtil {

    /**
     * Bootstrap address used by the Kerberos-secured sources when no "servers"
     * parameter is supplied (kept for backward compatibility with existing jobs
     * that relied on the previously hard-coded address).
     */
    private static final String DEFAULT_KERBEROS_SERVERS = "25.34.21.1:21007";

    /** Kerberos domain of the target FusionInsight Kafka cluster. */
    private static final String KERBEROS_DOMAIN = "hadoop.d7351232_0e6e_47a9_88a8_410e33a5e3dd.com";

    /** Utility class — not meant to be instantiated. */
    private KafkaUtil() {
    }

    /**
     * Builds a Kerberos-authenticated Kafka source that deserializes each message
     * as a plain string (OGG payloads).
     *
     * <p>Required parameters: {@code groupId}, {@code reset}, {@code topic}.
     * Optional: {@code servers} (falls back to {@link #DEFAULT_KERBEROS_SERVERS}).
     *
     * @param params        job parameters supplying consumer configuration
     * @param userPrincipal Kerberos principal used by the source for authentication
     * @return a Kerberos-enabled Kafka source producing raw string records
     */
    public static KafkaKerbosSource<String> getOggKafkaStringSource(ParameterTool params, String userPrincipal) {
        Properties kafkaProps = kerberosConsumerProps(params);
        String topic = params.getRequired("topic");
        return new KafkaKerbosSource<>(topic, new SimpleStringSchema(), kafkaProps, userPrincipal);
    }

    /**
     * Builds a Kerberos-authenticated Kafka source that deserializes Huawei DRS
     * Avro-format messages into {@link Record} instances.
     *
     * <p>Required parameters: {@code groupId}, {@code reset}, {@code topic}.
     * Optional: {@code servers} (falls back to {@link #DEFAULT_KERBEROS_SERVERS}).
     *
     * @param params        job parameters supplying consumer configuration
     * @param userPrincipal Kerberos principal used by the source for authentication
     * @return a Kerberos-enabled Kafka source producing Avro {@link Record}s
     */
    public static KafkaKerbosSource<Record> getKafkaAvroSource(ParameterTool params, String userPrincipal) {
        Properties kafkaProps = kerberosConsumerProps(params);
        String topic = params.getRequired("topic");
        DeserializationSchema<Record> valueDeserializer = new AvroDerializationSchema();
        return new KafkaKerbosSource<>(topic, valueDeserializer, kafkaProps, userPrincipal);
    }

    /**
     * Builds a plain (non-Kerberos) Kafka source for Huawei MQS data; the broker
     * does not require Kerberos authentication, so no security properties are set.
     *
     * <p>Required parameters: {@code groupId}, {@code servers}, {@code reset}, {@code topic}.
     *
     * @param params        job parameters supplying consumer configuration
     * @param userPrincipal user principal forwarded to the source (unused for
     *                      authentication here, kept for a uniform call signature)
     * @return a Kafka source producing raw string records
     */
    public static KafkaNoKerbosSource<String> getKafkaStringSource(ParameterTool params, String userPrincipal) {
        Properties kafkaProps = baseConsumerProps(params);
        kafkaProps.setProperty("bootstrap.servers", params.getRequired("servers"));
        String topic = params.getRequired("topic");
        return new KafkaNoKerbosSource<>(topic, new SimpleStringSchema(), kafkaProps, userPrincipal);
    }

    /**
     * Consumer settings shared by every source: group id, auto-commit (every 4s),
     * and the offset-reset policy taken from the required "reset" parameter.
     */
    private static Properties baseConsumerProps(ParameterTool params) {
        Properties props = new Properties();
        props.setProperty("group.id", params.getRequired("groupId"));
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("auto.commit.interval.ms", "4000");
        props.setProperty("auto.offset.reset", params.getRequired("reset"));
        return props;
    }

    /**
     * Base consumer settings plus the SASL/Kerberos security configuration used
     * by the secured sources. The bootstrap address is taken from the optional
     * "servers" parameter to stay backward compatible with jobs that relied on
     * the previously hard-coded address.
     */
    private static Properties kerberosConsumerProps(ParameterTool params) {
        Properties props = baseConsumerProps(params);
        props.setProperty("bootstrap.servers", params.get("servers", DEFAULT_KERBEROS_SERVERS));
        props.setProperty("security.protocol", "SASL_PLAINTEXT");
        props.setProperty("sasl.kerberos.service.name", "kafka");
        props.setProperty("kerberos.domain.name", KERBEROS_DOMAIN);
        return props;
    }

}
