package com.zhu.util;

import com.ctc.wstx.osgi.WstxBundleActivator;
import com.zhu.config.ClusterParametersConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Kafka utility class: builds Flink Kafka sources and sinks configured from the
 * shared cluster properties file.
 */
public class ClusterKafkaUtil {

    // NOTE(review): hard-coded absolute Windows path — this breaks on any other
    // machine/environment. Consider loading "cluster.properties" from the
    // classpath via PropertiesUtil instead; confirm what PropertiesUtil supports.
    private static final String CLUSTER_PROPERTIES_PATH =
            "D:\\java\\NLP\\Flink-realtimeProcess\\src\\main\\resources\\cluster.properties";

    /** Utility class — not instantiable. */
    private ClusterKafkaUtil() {
    }

    /**
     * Builds a Flink Kafka consumer (source) for the given topic.
     *
     * @param topic   Kafka topic to subscribe to
     * @param groupId Kafka consumer group id
     * @return a {@link FlinkKafkaConsumer} producing UTF-8 decoded string records
     *         (null for tombstone/null-value records)
     * @throws IOException if the cluster properties file cannot be read
     */
    public static FlinkKafkaConsumer<String> getFlinkKafkaConsumer(String topic, String groupId) throws IOException {

        Properties clusterProperties = PropertiesUtil.getProperties(CLUSTER_PROPERTIES_PATH);

        Properties kafkaProperties = new Properties();
        // The config constant is a property KEY; resolve the actual broker
        // address through the cluster properties file.
        kafkaProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                clusterProperties.getProperty(ClusterParametersConfig.KAFKA_BOOTSTRAP_SERVER));
        kafkaProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        return new FlinkKafkaConsumer<String>(topic,
                new KafkaDeserializationSchema<String>() {
                    @Override
                    public boolean isEndOfStream(String s) {
                        return false;  // unbounded stream — never ends
                    }

                    /**
                     * Kafka's built-in string deserializer throws on null values;
                     * this tolerant variant maps null records/values to null so the
                     * job is not killed by tombstone messages.
                     *
                     * @param consumerRecord raw Kafka record (may be null / have null value)
                     * @return the record value decoded as UTF-8, or null
                     */
                    @Override
                    public String deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
                        if (consumerRecord == null || consumerRecord.value() == null) {
                            return null;
                        }
                        // FIX: specify the charset explicitly. new String(byte[]) used the
                        // platform-default charset, while the producer side writes UTF-8 —
                        // an asymmetric round-trip on non-UTF-8 JVMs.
                        return new String(consumerRecord.value(), StandardCharsets.UTF_8);
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return BasicTypeInfo.STRING_TYPE_INFO;  // produced element type
                    }
                }, kafkaProperties);
    }

    /**
     * Builds an exactly-once Flink Kafka producer (sink) for the given topic.
     * Null input elements are written as empty UTF-8 payloads.
     *
     * @param topic Kafka topic to write to
     * @return a {@link FlinkKafkaProducer} with EXACTLY_ONCE semantics
     * @throws UncheckedIOException if the cluster properties file cannot be read
     *                              (wrapped to keep the original signature)
     */
    public static FlinkKafkaProducer<String> getFlinkKafkaProducer(String topic) {

        Properties clusterProperties;
        try {
            clusterProperties = PropertiesUtil.getProperties(CLUSTER_PROPERTIES_PATH);
        } catch (IOException e) {
            throw new UncheckedIOException("Failed to load cluster properties from " + CLUSTER_PROPERTIES_PATH, e);
        }

        Properties properties = new Properties();
        // FIX: the original passed the config KEY (ClusterParametersConfig.KAFKA_BOOTSTRAP_SERVER)
        // directly as the bootstrap-servers VALUE. Resolve it through the properties
        // file, consistent with getFlinkKafkaConsumer above.
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                clusterProperties.getProperty(ClusterParametersConfig.KAFKA_BOOTSTRAP_SERVER));
        // FIX: EXACTLY_ONCE uses Kafka transactions; Flink's default transaction
        // timeout (1 hour) exceeds the broker default transaction.max.timeout.ms
        // (15 minutes), which makes the producer fail at startup. Pin it within
        // the broker's default limit.
        properties.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(15 * 60 * 1000));

        return new FlinkKafkaProducer<String>(topic,
                new KafkaSerializationSchema<String>() {
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(String value, @Nullable Long timestamp) {
                        if (value == null) {
                            // Null elements: emit an empty payload instead of NPE-ing.
                            return new ProducerRecord<>(topic, "".getBytes(StandardCharsets.UTF_8));
                        }
                        return new ProducerRecord<>(topic, value.getBytes(StandardCharsets.UTF_8));
                    }
                }, properties, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
    }


}
