package com.example.checkpoint;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created with IntelliJ IDEA.
 * ClassName: SavepointDemo
 * Package: com.example.checkpoint
 * Description:
 * User: fzykd
 *
 * @Author: LQH
 * Date: 2023-07-29
 * Time: 15:31
 */

//Savepoint demo (保存点): checkpointing + stable operator uids for savepoint restore
public class SavepointDemo {

    /**
     * Word-count job configured with checkpointing so it can be stopped with a
     * savepoint and restored later. Reads lines from a socket, splits them on
     * spaces, and prints a running count per word.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);

        // User name used when writing checkpoint data to HDFS (permission check).
        System.setProperty("HADOOP_USER_NAME","bobo");

        // --- Checkpoint configuration ---
        // 1. Enable checkpointing: 5s interval. EXACTLY_ONCE is the default mode
        //    (the alternative is AT_LEAST_ONCE).
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // 2. Additional persistence settings beyond enabling checkpoints.
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        // Where checkpoint state is persisted; 8020 is HDFS's internal RPC port.
        // Accessing HDFS requires the user name set above for permissions.
        checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/che");
        // Timeout for a single checkpoint: 1 minute (default is 10 minutes).
        checkpointConfig.setCheckpointTimeout(60000);
        // Maximum number of checkpoints allowed to run concurrently.
        checkpointConfig.setMaxConcurrentCheckpoints(2);
        // Minimum pause between the end of one checkpoint and the start of the next.
        checkpointConfig.setMinPauseBetweenCheckpoints(1000);
        // Keep externalized checkpoints on HDFS when the job is cancelled.
        checkpointConfig.setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Number of consecutive checkpoint failures tolerated before the job fails
        // (default 0: one failed checkpoint kills the job).
        checkpointConfig.setTolerableCheckpointFailureNumber(10);


        // Give each operator a stable uid so its state can be matched when
        // restoring from a savepoint; name() is only a human-readable label
        // shown in the web UI.
        env.socketTextStream("hadoop102",7777).uid("socket")
                .flatMap(new FlatMapFunction<String, Tuple2<String,Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Emit (word, 1) for every word in the line.
                        // BUGFIX: originally collected Tuple2.of(s[0], 1), which
                        // emitted the line's FIRST word once per word instead of
                        // counting each word.
                        String[] s = value.split(" ");
                        for (String word : s) {
                            out.collect(Tuple2.of(word,1));
                        }
                    }
                }).uid("flatmap-wc").name("wc-flatmap")
                .keyBy(value -> value.f0)
                .sum(1).uid("sum-wc")
                .print().uid("print-wc");


        env.execute();

    }
}
