package ex.datastream;

import ex.datastream.functions.richFunction.StatefulKeyedProcessFunc;
import ex.datastream.functions.function.FlatMapFuncBySplitter02;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;


/**
 * Example Flink job demonstrating checkpointing: reads from a Kafka source,
 * splits lines into (word, count) tuples, and applies a stateful keyed process
 * function whose state is persisted via periodic checkpoints.
 */
public class Checkpoint01 extends ApiFrame {
    public static void main(String[] args) throws Exception {
        // Create the execution environment (provided by the ApiFrame base class).
        Checkpoint01 point01 = new Checkpoint01();
        point01.getEnv();

        // Enable checkpointing: persist state to disk every 5 seconds,
        // allowing the job to restart from the latest checkpoint.
        point01.env.enableCheckpointing(5000);

        // Set the checkpoint storage path (local filesystem).
        point01.env.getCheckpointConfig().setCheckpointStorage("file:///e:/ck");

        // Load the data source. Parameterize KafkaSource<String> instead of the
        // raw type so fromSource is type-safe (no unchecked conversion).
        KafkaSource<String> source = point01.getKafkaSource();
        DataStreamSource<String> dataStreamSource = point01.env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Split each input line into (word, 1) tuples.
        SingleOutputStreamOperator<Tuple2<String, Integer>> items = dataStreamSource.flatMap(new FlatMapFuncBySplitter02());

        // Key by word and apply the stateful process function; the explicit uid
        // keeps operator state addressable across job restarts/upgrades.
        items.keyBy(value -> value.f0).process(new StatefulKeyedProcessFunc()).uid("my-uid");

        point01.env.execute("save checkpoint job");

    }
}
