package com.cn.daimajiangxin.flink.sink;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Properties;

/**
 * Demonstrates writing a small in-memory stream of strings to a Kafka topic
 * with Flink's {@link KafkaSink}, configured for exactly-once delivery.
 */
public class KafkaSinkDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpointing is required for exactly-once delivery: the sink commits its
        // Kafka transactions when a checkpoint completes.
        env.enableCheckpointing(5000);

        DataStream<String> stream = env.fromData("Hello Kafka", "Flink to Kafka", "Data Pipeline");

        // Kafka producer configuration. For EXACTLY_ONCE, transaction.timeout.ms must
        // not exceed the broker's transaction.max.timeout.ms (15 minutes by default);
        // Flink's own producer default is larger, so set it explicitly.
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "192.168.0.199:9092");
        props.setProperty("transaction.timeout.ms", "600000");

        // Create the Kafka sink. The delivery guarantee must be set explicitly:
        // KafkaSink defaults to DeliveryGuarantee.NONE, so enabling checkpointing
        // alone does NOT give exactly-once semantics. EXACTLY_ONCE additionally
        // requires a transactional-id prefix so transactions can be identified and
        // recovered after a job restart.
        KafkaSink<String> sink = KafkaSink.<String>builder()
                .setKafkaProducerConfig(props)
                .setDeliveryGuarantee(DeliveryGuarantee.EXACTLY_ONCE)
                .setTransactionalIdPrefix("kafka-sink-demo")
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic("flink-output-topic")
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build())
                .build();

        // Attach the sink to the stream.
        stream.sinkTo(sink);

        env.execute("Kafka Sink Demo");
    }
}
