package com.wunong.smart.bigdata.flink.job;

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates a keyed rolling-sum aggregation over a bounded stream.
 *
 * @author zealot
 */
public class ReduceSumJob {

    public static void main(String[] args) throws Exception {
        // Set up the streaming environment with a default parallelism of 2.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // Bounded source: the integers 1 through 10.
        DataStream<Integer> source = env.fromElements(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        // Partition the stream into two groups by parity (key 0 = even, key 1 = odd),
        // then compute a rolling sum within each group. The reduce runs with
        // parallelism 1 so all partial results are handled by a single task.
        SingleOutputStreamOperator<Integer> summed = source
                .keyBy(new KeySelector<Integer, Integer>() {
                    @Override
                    public Integer getKey(Integer n) throws Exception {
                        return n % 2;
                    }
                })
                .reduce(Integer::sum)
                .setParallelism(1);

        // Emit each intermediate sum to stdout with the "Sum:" label.
        summed.print("Sum:");

        // Submit and run the job; the fully-qualified class name is the job name.
        env.execute(ReduceSumJob.class.getName());
    }
}