package com.abyss;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileSystem;

/**
 * 累加器演示
 */
/**
 * Flink accumulator demo: an {@link IntCounter} is registered inside a
 * {@link RichMapFunction}, incremented once per record flowing through the
 * map operator, and the aggregated total is read back from the
 * {@link JobExecutionResult} after the job completes.
 */
public class AccumulatorDemo2 {
    public static void main(String[] args) throws Exception {
        // 1. Set up the batch execution environment.
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Source: the sequence 1..10.
        final DataSource<Long> numbers = env.generateSequence(1, 10);

        // 3. Map step that bumps the accumulator once for every record;
        //    the record value itself passes through unchanged.
        final MapOperator<Long, Long> counted = numbers.map(new RichMapFunction<Long, Long>() {
            // Accumulator instance; lives for the lifetime of this operator.
            private final IntCounter counter = new IntCounter();

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                // Register the accumulator under the name "num" so the driver
                // can look it up on the job result after execution.
                getRuntimeContext().addAccumulator("num", counter);
            }

            @Override
            public Long map(Long value) throws Exception {
                // Count this record.
                counter.add(1);
                return value;
            }
        });

        counted.writeAsText("data/output/ac2", FileSystem.WriteMode.OVERWRITE);

        // Execute the job and keep the result handle.
        final JobExecutionResult result = env.execute();

        // Retrieve the accumulated count by its registered name.
        final int total = result.getAccumulatorResult("num");
        System.out.println("累加器结果: " + total);
    }
}