package day02;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Description: Rolling aggregation demo: sum and reduce on a keyed stream
 * @Author: ZYX
 * @Date: 2022/2/9 15:16
 * @Version: 1.0
 */
public class Demo05 {
    public static void main(String[] args) throws Exception {
        // Single-parallelism local environment so printed output is deterministic in order.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Bounded demo source: three (key, value) pairs, all sharing key 1.
        DataStreamSource<Tuple2<Integer, Integer>> source =
                env.fromElements(Tuple2.of(1, 2), Tuple2.of(1, 3), Tuple2.of(1, 9));

        /*
         * keyBy turns the DataStream into a KeyedStream by the given key selector.
         * Events are partitioned by key: all events with the same key are processed
         * by the same subtask of the downstream operator, while events with
         * different keys may share a subtask. An operator can only access the
         * state belonging to the key of the current event.
         */
        KeyedStream<Tuple2<Integer, Integer>, Integer> keyed = source.keyBy(pair -> pair.f0);

        // Rolling sum over tuple field 1 (the value); emits one updated record per input.
        keyed.sum(1).print();

        // Rolling maximum via reduce: keep the key, track the largest value seen so far.
        // A lambda is safe here because reduce's output type equals the stream's input type.
        keyed.reduce((left, right) -> Tuple2.of(left.f0, Math.max(left.f1, right.f1)))
                .print();

        env.execute();
    }
}
