package com.wunong.smart.bigdata.flink.job;

import com.wunong.smart.bigdata.flink.aggregate.CountAggregateFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.triggers.CountTrigger;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

/**
 * @author create by zealot
 */
public class SumJob {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Integer> stream = env.fromElements(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        final int size = 2;

        // 按照奇偶分组
        DataStream<Integer> reducedStream = stream
                // 分成2组
                .keyBy(v -> v % size)
                // 指定全局window
                .window(GlobalWindows.create())
                // 全局window必须要指定触发器：此处使用满足数量则执行
                .trigger(CountTrigger.of(5))
                .aggregate(new CountAggregateFunction())
                // 设置执行任务的并行度，一个分组一个任务
                .setParallelism(1);

        DataStream<String> resultStream = reducedStream
                .map(v -> String.format("key=%d, value=%d", v % size, v)) // 转换数据格式
                .returns(Types.STRING);

        // 输出结果
        resultStream.print("Result");

        // 执行任务
        env.execute(SumJob.class.getName());
    }

}