package com.daidai.transform;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class Rebalence {

    /**
     * Demonstrates Flink's {@code rebalance()} repartitioning strategy.
     *
     * <p>A sequence 0..100 is read with parallelism 3 and values {@code <= 10} are
     * filtered out, which skews how many elements each parallel subtask holds.
     * The job then counts elements per subtask twice — once on the skewed stream
     * and once after {@code rebalance()} redistributes elements round-robin — and
     * prints both counts so the distributions can be compared.
     *
     * @param args unused
     * @throws Exception if job execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // BATCH mode: each keyed sum is emitted once as a final result instead of
        // as a stream of incremental updates.
        env.setRuntimeMode(RuntimeExecutionMode.BATCH).setParallelism(3);

        DataStreamSource<Long> source = env.fromSequence(0, 100);
        // Dropping values <= 10 leaves the subtasks with unequal element counts.
        SingleOutputStreamOperator<Long> filter = source.filter(value -> value > 10);

        // Per-subtask element count BEFORE rebalancing (skewed distribution).
        filter.map(subtaskCounter()).keyBy(t -> t.f0).sum(1).print("rebalence之前");

        // Round-robin redistribution of the filtered stream.
        DataStream<Long> rebalance = filter.rebalance();

        // Per-subtask element count AFTER rebalancing (even distribution).
        rebalance.map(subtaskCounter()).keyBy(t -> t.f0).sum(1).print("rebalence之后");

        env.execute();
    }

    /**
     * Builds a mapper that tags each element with {@code (subtaskIndex, 1)}, so a
     * downstream {@code keyBy(f0).sum(1)} yields the number of elements processed
     * by each parallel subtask. A {@link RichMapFunction} is required because the
     * subtask index comes from the runtime context.
     *
     * @return a fresh mapper instance (one per operator in the pipeline)
     */
    private static RichMapFunction<Long, Tuple2<Integer, Integer>> subtaskCounter() {
        return new RichMapFunction<Long, Tuple2<Integer, Integer>>() {
            @Override
            public Tuple2<Integer, Integer> map(Long value) throws Exception {
                return Tuple2.of(getRuntimeContext().getIndexOfThisSubtask(), 1);
            }
        };
    }
}
