package cn.smileyan.demo;


import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.MultipleParameterTool;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * A minimal keyed-process + stateful example.
 * <p> 1. Consumes String records from Kafka and keys the stream by the record value.</p>
 * <p> 2. A string seen for the first time, or an odd number of times in total, gets state 1; otherwise 0.</p>
 * <p> 3. Prints the state of every received string.</p>
 * @author Smileyan
 */
@Slf4j
public class KafkaToTaskStateJob {
    public static void main(String[] args) throws Exception {
        // Command-line overrides with local-development defaults.
        final MultipleParameterTool params = MultipleParameterTool.fromArgs(args);
        final String brokers = params.get("bs", "localhost:9092");
        final String groupId = params.get("kcg", "flink-consumer");
        final String topic = params.get("it", "stateful-job");

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        log.info("broker {} topic {}", brokers, topic);

        // Kafka consumer: value-only String deserialization, starting from the latest offset.
        final KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers(brokers)
                .setTopics(topic)
                .setGroupId(groupId)
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        final DataStreamSource<String> messages =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Toggle the 0/1 state per key; TaskStateUpdater holds the keyed-state logic.
        final SingleOutputStreamOperator<Tuple2<String, Integer>> toggled =
                messages.keyBy(value -> value).process(new TaskStateUpdater());

        // A real job would sinkTo(...) here; printing suffices for the demo.
        toggled.print();

        env.execute("Flink Kafka Example");
    }
}
