package javaVersion.learn.batchProcess;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;


/**
 * Demonstrates Flink accumulators (shared counters merged by the JobManager)
 * in both the batch (DataSet) and streaming (DataStream) APIs, contrasted
 * with a plain instance field, which is only accurate at parallelism 1.
 */
public class Pro6_Acc {
    public static void main(String[] args) throws Exception {
        // Accumulator usage with the streaming API:
//        streamAcc();
        // Accumulator usage with the batch API:
        batchAcc();
    }

    /**
     * Runs a batch job that counts processed elements two ways: via a plain
     * field (per-subtask, unreliable when parallelism > 1) and via an
     * {@link IntCounter} accumulator (merged globally after the job finishes).
     *
     * @throws Exception if job submission or execution fails
     */
    private static void batchAcc() throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Varargs form is the idiomatic way to build a small test source.
        DataSource<String> data = env.fromElements("a", "b", "c", "d");
        MapOperator<String, String> result = data.map(new RichMapFunction<String, String>() {
            private final IntCounter numLines = new IntCounter();
            private int sum = 0;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Register the accumulator once per task instance; results
                // for all subtasks are merged under this name.
                getRuntimeContext().addAccumulator("myAcc", numLines);
            }

            @Override
            public String map(String value) throws Exception {
                // A plain field is only accurate with parallelism 1; with
                // parallelism > 1 each subtask keeps its own copy, so this
                // value does not reflect the global count.
                sum += 1;
                System.out.println("普通变量统计：" + sum);

                // The accumulator, by contrast, is merged across subtasks.
                numLines.add(1);
                return value;
            }
        }).setParallelism(1);
        result.writeAsText("./data/output/count0", FileSystem.WriteMode.OVERWRITE);
        // Accumulator results are only available once the job has finished;
        // capture the JobExecutionResult (same style as streamAcc).
        JobExecutionResult jobResult = env.execute();
        String total = jobResult.getAccumulatorResult("myAcc").toString();
        System.out.println("累加变量为：" + total);
    }

    /**
     * Runs a streaming job that counts processed elements with an
     * {@link IntCounter} accumulator and prints each subtask's local value
     * alongside its thread id to illustrate per-subtask state.
     *
     * @throws Exception if job submission or execution fails
     */
    private static void streamAcc() throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> data = env.fromElements("a", "b", "c", "d");
        SingleOutputStreamOperator<String> mapData = data.map(new RichMapFunction<String, String>() {
            private final IntCounter numLines = new IntCounter();

            // Rich-function lifecycle hook: runs once per task instance
            // before any records are processed.
            @Override
            public void open(Configuration parameters) throws Exception {
                getRuntimeContext().addAccumulator("num-lines", numLines);
            }

            // Processing logic: increment the accumulator for every record.
            @Override
            public String map(String value) throws Exception {
                numLines.add(1);
                // getLocalValue() shows only this subtask's portion of the count.
                System.out.println("LocalValue" + numLines.getLocalValue() + "，线程id：" + Thread.currentThread().getId());

                return value;
            }
        });
        mapData.print();
        JobExecutionResult execute = env.execute();
        // The merged, job-wide total is available after execution completes.
        String total = execute.getAccumulatorResult("num-lines").toString();
        System.out.println(total);
    }
}
