package yuekao6.util;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * date 2024/7/20 9:33
 */

/**
 * Factory methods for Flink Kafka sources and sinks used by this job.
 *
 * <p>All connectors talk to the single-node broker {@code hadoop-single:9092}.
 */
public final class KafkaUtil {

    /** Kafka broker address shared by every source and sink built here. */
    private static final String BOOTSTRAP_SERVERS = "hadoop-single:9092";

    /** Consumer group id shared by both source factories. */
    private static final String GROUP_ID = "test";

    /** Utility class — not instantiable. */
    private KafkaUtil() {
    }

    /** Builds the consumer properties shared by {@link #kafkaSource} and {@link #kafkaSources}. */
    private static Properties consumerProperties() {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);
        properties.setProperty("group.id", GROUP_ID);
        return properties;
    }

    /**
     * Creates a string-deserializing consumer for the given topic that starts
     * from the consumer group's committed offsets (Flink's default start position).
     *
     * @param topic the Kafka topic to read
     * @return a {@link FlinkKafkaConsumer} for {@code topic}
     */
    public static FlinkKafkaConsumer<String> kafkaSource(String topic) {
        return new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), consumerProperties());
    }

    /**
     * Creates a string-deserializing consumer for the given topic that always
     * starts from the earliest available offset, ignoring committed offsets.
     *
     * @param topic the Kafka topic to read
     * @return a {@link FlinkKafkaConsumer} for {@code topic} reading from earliest
     */
    public static FlinkKafkaConsumer<String> kafkaSources(String topic) {
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), consumerProperties());
        kafkaConsumer.setStartFromEarliest();
        return kafkaConsumer;
    }

    /**
     * Creates an exactly-once producer that writes UTF-8 encoded strings to the
     * given topic.
     *
     * @param topic the target Kafka topic
     * @return a {@link FlinkKafkaProducer} with {@code EXACTLY_ONCE} semantics
     */
    public static FlinkKafkaProducer<String> kafkaSink(String topic) {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);
        // EXACTLY_ONCE relies on Kafka transactions. Flink's default producer
        // transaction timeout (1 hour) exceeds the broker's default
        // transaction.max.timeout.ms (15 minutes), which makes the producer
        // fail at initialization — cap it explicitly to the broker maximum.
        properties.setProperty("transaction.timeout.ms", String.valueOf(15 * 60 * 1000));

        // Anonymous class (rather than a lambda) keeps Flink's type extraction
        // unambiguous for the serialization schema.
        KafkaSerializationSchema<String> serializationSchema = new KafkaSerializationSchema<String>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(String element, Long timestamp) {
                // Key-less record: value is the UTF-8 bytes of the element.
                return new ProducerRecord<>(topic, element.getBytes(StandardCharsets.UTF_8));
            }
        };

        return new FlinkKafkaProducer<>(
                topic,                                     // target topic
                serializationSchema,                       // serialization schema
                properties,                                // producer config
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE); // fault-tolerance guarantee
    }
}
