package cn.azzhu.day06;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author azzhu
 * @create 2020-09-20 00:15:13
 */
/**
 * Demo of Flink operator state with exactly-once semantics.
 *
 * <p>Configures checkpointing against a local filesystem backend, wires up a
 * socket stream whose mapper fails on demand (any line starting with "azzhu"
 * triggers an ArithmeticException), and runs a custom parallel file source
 * that restores its read offsets from operator state after a restart.
 */
public class OperatorStateDemo {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpoint every 5 seconds; run with two parallel subtasks.
        env.enableCheckpointing(5000);
        env.setParallelism(2);
        // On failure, restart up to 2 times with a 2-second delay between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(2, 2000));

        // Persist checkpoint state to the local filesystem.
        env.setStateBackend(new FsStateBackend("file:///D:\\bigdata\\flink-learning\\backend"));

        // Keep externalized checkpoints around even when the job is cancelled,
        // so the job can be resumed from them manually.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // TODO: controls when the program throws — any socket line starting
        // with "azzhu" deliberately crashes the task to exercise recovery.
        final DataStreamSource<String> socketLines = env.socketTextStream("hadoop105", 9999);
        socketLines.map(new MapFunction<String, String>() {
            @Override
            public String map(String value) throws Exception {
                if (!value.startsWith("azzhu")) {
                    return value;
                }
                // Intentional ArithmeticException to force a task failure.
                System.out.println(1 / 0);
                return value;
            }
        }).print();

        // Custom source whose per-file read positions are kept in operator
        // state, giving exactly-once replay after a restart.
        final DataStreamSource<Tuple2<String, String>> fileStream =
                env.addSource(new MyExactlyOnceParFileSource("data"));
        fileStream.print();

        env.execute("OperatorStateDemo");
    }
}
