package com.fwmagic.flink.batch;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class BatchTestJava {

    /**
     * Demo of Flink keyed rolling aggregations (reduce / sum / max / minBy) on a
     * small in-memory stream of (key, value) tuples. Exactly one aggregation is
     * active at a time; the alternatives are kept commented out together with the
     * output they would print.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a deterministic order.
        env.setParallelism(1);
        DataStream<Tuple2<String, Integer>> dataStream = env.fromElements(
                Tuple2.of("a", 3), Tuple2.of("d", 4), Tuple2.of("c", 2), Tuple2.of("c", 5), Tuple2.of("a", 5));

        // Rolling reduce: emits one record per input while keeping per-key state,
        // so each output is the running sum for that key (equivalent to sum(1)).
        /*dataStream.keyBy(0).reduce(new ReduceFunction<Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> reduce(Tuple2<String, Integer> v1, Tuple2<String, Integer> v2) throws Exception {
                return Tuple2.of(v1.f0, v2.f1+v1.f1);
            }
        }).print();*/
        /**
         * (a,3)
         * (d,4)
         * (c,2)
         * (c,7)
         * (a,8)
         */
        // Rolling sum of field 1 per key; prints the same running sums as above.
        //dataStream.keyBy(0).sum(1).print();

        /**
         * (a,3)
         * (d,4)
         * (c,2)
         * (c,5)
         * (a,5)
         */
        // Rolling maximum of field 1 per key.
        //dataStream.keyBy(0).max(1).print();

        /**
         * (a,3)
         * (d,4)
         * (c,2)
         * (c,2)
         * (a,3)
         */
        // Rolling minimum of field 1 per key; minBy returns the whole element
        // that contains the minimum value (not just the aggregated field).
        dataStream.keyBy(0).minBy(1).print();

        env.execute(StreamExecutionEnvironment.DEFAULT_JOB_NAME);
    }
}
