package com.utils;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * Utility class that builds Flink Kafka sources and sinks with string
 * (de)serialization, all pointing at the broker in {@link #BOOTSTRAP_SERVERS}.
 */
public class KafkaUtils {

    /** Kafka broker address shared by both the producer and consumer factories. */
    private static final String BOOTSTRAP_SERVERS = "192.168.80.128:9092";

    /** Utility class — not meant to be instantiated. */
    private KafkaUtils() {
    }

    /**
     * Creates a Kafka producer, usable as a Flink sink, for the given topic.
     *
     * @param topic target Kafka topic to write to
     * @return a producer that serializes each record as a UTF-8 string
     */
    public static FlinkKafkaProducer<String> createProducer(String topic) {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);

        // SimpleStringSchema writes each element out as a plain UTF-8 string.
        return new FlinkKafkaProducer<>(topic, new SimpleStringSchema(), properties);
    }

    /**
     * Creates a Kafka producer for the given topic.
     *
     * @param topic target Kafka topic to write to
     * @return a producer that serializes each record as a UTF-8 string
     * @deprecated method name is misspelled; use {@link #createProducer(String)}.
     */
    @Deprecated
    public static FlinkKafkaProducer<String> createProduer(String topic) {
        return createProducer(topic);
    }

    /**
     * Creates a Kafka consumer, usable as a Flink source, for the given topic
     * and consumer group.
     *
     * @param topic   Kafka topic to read from
     * @param groupId Kafka consumer group id
     * @return a consumer that deserializes each record as a UTF-8 string
     */
    public static FlinkKafkaConsumer<String> createConsumer(String topic, String groupId) {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);
        // BUG FIX: the groupId argument was previously ignored and "test" was
        // hard-coded, so every caller shared one consumer group.
        properties.setProperty("group.id", groupId);

        return new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), properties);
    }
}
