package com.hjl.demo.kafka;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;


public class MyKafkaUtil {
    // Kafka bootstrap servers shared by every producer/consumer built here.
    public static String brokers = "hadoop01:9092,hadoop02:9092,hadoop03:9092";

    // Fallback topic required by the FlinkKafkaProducer constructor; only used
    // when the KafkaSerializationSchema does not set a topic on the record.
    public static String default_topic = "default";

    /**
     * Builds a simple string producer writing to a fixed topic.
     *
     * @param topic target Kafka topic
     * @return a {@link FlinkKafkaProducer} serializing records with {@link SimpleStringSchema}
     */
    public static FlinkKafkaProducer<String> getKafkaProducer(String topic) {
        return new FlinkKafkaProducer<String>(brokers,
                topic,
                new SimpleStringSchema());
    }

    /**
     * Builds a generic producer (ods -&gt; dwd layer) whose destination topic is
     * chosen per record by the supplied serialization schema.
     *
     * <p>NOTE: {@code Semantic.NONE} provides no delivery guarantee (records may be
     * lost or duplicated on failure). For exactly-once delivery, pass
     * {@code Semantic.EXACTLY_ONCE} and enable Flink checkpointing together with
     * Kafka transaction support.
     *
     * @param <T>                      element type of the stream
     * @param kafkaSerializationSchema schema that serializes each element and
     *                                 selects its destination topic
     * @return a {@link FlinkKafkaProducer} for elements of type {@code T}
     */
    public static <T> FlinkKafkaProducer<T> getKafkaProducer(KafkaSerializationSchema<T> kafkaSerializationSchema) {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);

        return new FlinkKafkaProducer<T>(default_topic,
                kafkaSerializationSchema,
                properties,
                FlinkKafkaProducer.Semantic.NONE);
    }

    /**
     * Builds a string consumer for the given topic and consumer group.
     *
     * @param topic   Kafka topic to subscribe to
     * @param groupId Kafka consumer group id
     * @return a {@link FlinkKafkaConsumer} deserializing records with {@link SimpleStringSchema}
     */
    public static FlinkKafkaConsumer<String> getKafkaConsumer(String topic, String groupId) {
        Properties properties = new Properties();
        // setProperty (type-safe String API) for consistency with the producer config above.
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        return new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), properties);
    }
}
