package com.wuwangfu.window;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;

/**
 * @Author jcshen
 * @Date 2023-02-23
 * @PackageName:com.wuwangfu.window
 * @ClassName: CountWindowAllReduce
 * @Description:
 * @Version 1.0.0
 *
 * https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/datastream/operators/overview/#windowreduce
 */
public class CountWindowAllReduce {

    /**
     * Reads integers (one per line) from a socket text stream, groups them into
     * non-keyed count windows of 5 elements, and prints the sum of each full window.
     *
     * <p>Uses {@code reduce()} for incremental aggregation: each arriving element is
     * folded into the running partial result immediately (see
     * {@code org.apache.flink.runtime.state.heap.HeapReducingState.ReduceTransformation#apply}:
     * {@code previousState != null ? reduceFunction.reduce(previousState, value) : value})
     * instead of buffering the whole window until it fires — cheaper in both state
     * size and CPU.
     *
     * @param args optional overrides: {@code args[0]} = host, {@code args[1]} = port
     *             (defaults: {@code localhost} / {@code 8888}, matching the original
     *             hard-coded values)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // Allow host/port to be supplied on the command line; fall back to the
        // previous hard-coded defaults so existing invocations keep working.
        final String host = args.length > 0 ? args[0] : "localhost";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> line = env.socketTextStream(host, port);

        // Parse each incoming text line into an integer.
        SingleOutputStreamOperator<Integer> nums = line.map(Integer::parseInt);

        // Non-keyed count window: the window fires once 5 elements have arrived.
        AllWindowedStream<Integer, GlobalWindow> windowed = nums.countWindowAll(5);

        // Integer::sum is the idiomatic replacement for an anonymous
        // ReduceFunction<Integer> returning v1 + v2; the first argument is the
        // running partial sum, the second is the newly arrived element.
        SingleOutputStreamOperator<Integer> reduced = windowed.reduce(Integer::sum);
        reduced.print();

        env.execute();
    }
}
