package com.xu.demo.common.flink;

import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.stereotype.Component;

import java.util.Properties;
import java.util.UUID;

@Slf4j
@Component
@Slf4j
@Component
public class KafkaProducerTest {

    /**
     * Runs a Flink streaming job that emits one random UUID string per second
     * and writes each one to the Kafka topic {@code topic_a} via {@link KafkaSink}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job cannot be built or fails during execution
     */
    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Producer settings forwarded to the underlying KafkaProducer.
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.ACKS_CONFIG, "1");
        properties.setProperty(ProducerConfig.LINGER_MS_CONFIG, "0");
        properties.setProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000");

        // Record serialization schema.
        // NOTE(review): SimpleStringSchema is set as BOTH key and value schema, so every
        // record's key is the same string as its value — confirm this is intended
        // (omit setKeySerializationSchema for null keys / round-robin partitioning).
        KafkaRecordSerializationSchema<String> recordSerializer = KafkaRecordSerializationSchema.builder()
                // topic every record is written to
                .setTopic("topic_a")
                // value serialization (UTF-8 string)
                .setValueSerializationSchema(new SimpleStringSchema())
                // key serialization (UTF-8 string)
                .setKeySerializationSchema(new SimpleStringSchema())
                .build();

        // Build the KafkaSink operator.
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                // Kafka producer properties
                .setKafkaProducerConfig(properties)
                // serialization schema built above
                .setRecordSerializer(recordSerializer)
                // Delivery guarantees:
                // At Most Once  - a message is delivered once or not at all; it may be lost but never duplicated.
                // At Least Once - a message is delivered at least once; nothing is lost but duplicates are possible.
                // Exactly Once  - a message is delivered exactly once; the strongest guarantee, no loss and no duplicates.
                //.setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                // Kafka broker address(es)
                .setBootstrapServers("127.0.0.1:9092")
                // transactional-id prefix (only relevant when EXACTLY_ONCE is enabled)
                .setTransactionalIdPrefix("flink_")
                .build();

        // Unbounded source that emits one random UUID per second until cancelled.
        SourceFunction<String> sourceFunction = new SourceFunction<String>() {
            // Flipped by cancel(); volatile so the change is visible to the thread running run().
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<String> sourceContext) throws Exception {
                // Loop on the flag instead of while(true) so cancel() can actually
                // stop this source — required by the SourceFunction contract.
                while (running) {
                    String id = UUID.randomUUID().toString();
                    sourceContext.collect(id);
                    log.info("正在下发数据:{}", id);
                    Thread.sleep(1000);
                }
            }

            @Override
            public void cancel() {
                // Signal run() to exit its loop so the job can shut down cleanly.
                running = false;
            }
        };

        // Register the source with the environment.
        DataStreamSource<String> dataStreamSource = env.addSource(sourceFunction).setParallelism(1);

        // Route the stream into Kafka through the sink operator.
        dataStreamSource.sinkTo(kafkaSink).setParallelism(1);

        // Submit and run the job.
        env.execute("KafkaSinkStreamJobDemo");

    }
}
