package demo;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Properties;

/**
 * Reads records from Kafka and writes them back to Kafka with end-to-end
 * exactly-once delivery (EXACTLY_ONCE checkpointing + transactional Kafka sink
 * + read_committed consumption).
 */
public class KafkaToKafkaExactlyOnce {

    public static void main(String[] args) throws Exception {
        // Example args:
        // --checkpoint-interval 3000 --topics kafka-in --topic-out kafka-out
        // --bootstrap.servers master:9092,slave1:9092,slave2:9092 --group.id killer
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        ParameterTool parameterTool = ParameterTool.fromArgs(args);

        // EXACTLY_ONCE checkpointing is the prerequisite for the transactional
        // Kafka sink below; sink transactions commit on checkpoint completion.
        env.enableCheckpointing(parameterTool.getLong("checkpoint-interval", 15000), CheckpointingMode.EXACTLY_ONCE);

        // Side input from a local socket. Typing "error" injects a failure so
        // checkpoint recovery / exactly-once behavior can be observed.
        DataStreamSource<String> socketTextStream = env.socketTextStream("localhost", 8888);
        SingleOutputStreamOperator<String> socketWithFailure = socketTextStream.map(new MapFunction<String, String>() {
            @Override
            public String map(String s) throws Exception {
                if ("error".equals(s)) {
                    throw new RuntimeException("deliberate failure triggered by input 'error'");
                }
                return s;
            }
        });

        // Kafka source: read_committed so records belonging to aborted or
        // still-open upstream transactions are never consumed.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers(parameterTool.getRequired("bootstrap.servers"))
                // getRequired: a null topic list would NPE inside the builder.
                .setTopics(parameterTool.getRequired("topics").split(","))
                .setGroupId(parameterTool.get("group.id", "kafka-to-kafka-exactly-once"))
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setProperty("isolation.level", "read_committed")
                .build();

        DataStreamSource<String> lines = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "Kafka Source");

        SingleOutputStreamOperator<String> upper = lines.map(String::toUpperCase);

        // transaction.timeout.ms must not exceed the broker's
        // transaction.max.timeout.ms (broker default: 15 minutes), otherwise
        // the transactional producer fails on initialization.
        Properties properties = new Properties();
        properties.put("transaction.timeout.ms", "15000");

        // Kafka sink. EXACTLY_ONCE requires a transactional-id prefix; the
        // KafkaSink builder rejects the configuration without one, so the
        // original code could never start.
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                .setBootstrapServers(parameterTool.getRequired("bootstrap.servers"))
                .setDeliverGuarantee(DeliveryGuarantee.EXACTLY_ONCE)
                .setTransactionalIdPrefix(parameterTool.get("transactional-id-prefix", "kafka-to-kafka-exactly-once"))
                .setKafkaProducerConfig(properties)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic(parameterTool.getRequired("topic-out"))
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build()
                )
                .build();

        // Union the MAPPED socket stream. The original unioned the raw socket
        // stream, leaving the failure-injecting map operator dangling (dead
        // code), so the exactly-once recovery demo could never be triggered.
        DataStream<String> union = upper.union(socketWithFailure);

        union.sinkTo(kafkaSink);
        env.execute("KafkaToKafkaExactlyOnce");
    }
}
