package stateful;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Demonstrates Flink checkpointing: state is periodically snapshotted to HDFS
 * so the job can recover after the deliberately-injected failure below.
 */
public class CheckpointingDemo2 {

    /**
     * Entry point. Reads words from a socket, counts them per key, and checkpoints
     * state to HDFS every 5 seconds. Lines containing "error" deliberately crash the
     * task to demonstrate automatic restart + state recovery from the last checkpoint.
     *
     * @param args [0] checkpoint path (e.g. hdfs://namenode:8020/ckpt),
     *             [1] socket hostname, [2] socket port
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an opaque
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 3) {
            System.err.println(
                    "Usage: CheckpointingDemo2 <checkpointPath> <hostname> <port>");
            System.exit(1);
        }

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Impersonate the root user so the job may write checkpoints to HDFS.
        System.setProperty("HADOOP_USER_NAME", "root");

        // Checkpoint every 5 seconds; the default restart strategy restarts indefinitely.
        env.enableCheckpointing(5000);
        // Store checkpoint state on HDFS at the path supplied by the caller.
        env.setStateBackend(new FsStateBackend(args[0]));

        DataStreamSource<String> lines = env.socketTextStream(args[1], Integer.parseInt(args[2]));

        lines.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
                for (String word : s.split(" ")) {
                    // Deliberate failure on "error" tokens: triggers a task restart
                    // so counts visibly resume from the last checkpoint.
                    if (word.contains("error")) {
                        throw new RuntimeException("出现错误数据!");
                    } else {
                        collector.collect(Tuple2.of(word, 1));
                    }
                }
            }
        }).keyBy(t -> t.f0)
                .sum(1)
                .print();

        // Use a descriptive job name so the job is identifiable in the Flink UI/logs
        // (previously submitted with an empty name).
        env.execute("CheckpointingDemo2");
    }
}
