package com.fwmagic.flink.restartstrategies;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Notes:
 * 1. To save checkpoints to HDFS, add the Hadoop jar to Flink's lib directory: flink-shaded-hadoop-2-uber-2.7.5-10.0.jar
 * 2. Configure in flink-conf.yaml:
 *  state.backend: filesystem
 *  state.checkpoints.dir: hdfs://hd1:9000/flink/flink-checkpoints/
 *
 * Web UI submission: specify the savepoint directory in the form.
 * Command-line submission: restore state from a checkpoint with -s, e.g.
 * bin/flink run -m 192.168.62.131:8081 -c com.fwmagic.flink.restartstrategies.CheckPointingDemo -p 4 -s hdfs://192.168.62.131:9000/flink/flink-checkpoints/ch2/50075b8bf20a10b63ff473e7a61e54cc/chk-54 /home/hadoop/apps/flink/flink-1.9.0/examples/fwmagic-flink-1.0.jar
 *
 */
public class CheckPointingDemo {

    /**
     * Maps each input word to a {@code (word, 1)} tuple. Words starting with
     * "duanwang" ("network down") deliberately throw a RuntimeException so the
     * configured restart strategy can be observed in action.
     */
    private static class WordToOneMapper implements MapFunction<String, Tuple2<String, Integer>> {
        @Override
        public Tuple2<String, Integer> map(String word) throws Exception {
            // Simulated failure trigger for the restart-strategy demo.
            if (word.startsWith("duanwang")) {
                throw new RuntimeException("断网啦，程序出异常了！");
            }
            return Tuple2.of(word, 1);
        }
    }

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // Checkpointing must be enabled for any restart strategy to take effect.
        env.enableCheckpointing(5000);

        // Restart at most 3 times (2000 ms between attempts); after the 3rd
        // failed restart the job terminates for good.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 2000));

        // Alternative: keep the state backend on the local file system.
        //env.setStateBackend(new FsStateBackend("file:///Users/fangwei/soft/mljr/workspace/flink/fwmagic-flink/ck"));
        // Alternative: keep the state backend on HDFS.
        //System.setProperty("HADOOP_USER_NAME", "hadoop");
        //env.setStateBackend(new FsStateBackend("hdfs://192.168.62.131:9000/flink/flink-checkpoints/ch2"));

        // Retain externalized checkpoints even when the job crashes or is
        // cancelled manually, so state can be restored later via -s.
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        final DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        final SingleOutputStreamOperator<Tuple2<String, Integer>> wordTuples =
                lines.map(new WordToOneMapper());

        // Word count: key by the word (tuple field 0), sum the count (field 1).
        final SingleOutputStreamOperator<Tuple2<String, Integer>> counts =
                wordTuples.keyBy(0).sum(1);

        counts.print();

        env.execute("CheckPointingDemo");
    }
}
