package sink;

import org.apache.flink.api.common.functions.RichMapFunction;
        import org.apache.flink.api.java.tuple.Tuple2;
        import org.apache.flink.streaming.api.datastream.DataStream;
        import org.apache.flink.streaming.api.datastream.DataStreamSource;
        import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
        import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates Flink's {@code rebalance()} repartitioning.
 *
 * <p>Requirement: with 3 parallel subtasks, process the 90 numbers greater than
 * 10 (out of the sequence 1..100) and count how many elements each subtask
 * receives. Filtering alone can leave the partitions skewed; {@code rebalance()}
 * redistributes elements round-robin so each subtask gets an even share.
 *
 * <p>Author: itcast — Date: 2021/6/17 15:00
 */
public class RebalanceDemo {
    public static void main(String[] args) throws Exception {
        // 1. env — parallelism 3, per the stated requirement (3 subtasks / 90 numbers).
        //    NOTE(review): was setParallelism(6), contradicting its own comment
        //    "set parallelism to 3"; changed to 3 to match the documented intent.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        // 2. source — the sequence 1..100.
        DataStreamSource<Long> source = env.fromSequence(1, 100);

        // 3. Transformation
        // Keep only the numbers greater than 10 (90 elements remain). Depending on
        // how the source splits, the filtered partitions may be skewed.
        DataStream<Long> filterDS = source.filter(s -> s > 10);

        // 3.1 Variant WITHOUT rebalance (kept for comparison): map each element to
        // Tuple2(subtaskIndex, 1), then key by subtask index and sum the counts to
        // see how many elements each subtask processed.
        /*SingleOutputStreamOperator<Tuple2<Integer, Integer>> mapDS = filterDS.map(new RichMapFunction<Long, Tuple2<Integer, Integer>>() {
            @Override
            public Tuple2<Integer, Integer> map(Long value) throws Exception {
                // getRuntimeContext() exposes the index of this parallel subtask.
                int idx = getRuntimeContext().getIndexOfThisSubtask();
                return Tuple2.of(idx, 1);
            }
        });
        SingleOutputStreamOperator<Tuple2<Integer, Integer>> result1 = mapDS.keyBy(i -> i.f0)
                .sum(1);*/

        // 3.2 Same pipeline, but rebalance() first: elements are redistributed
        // round-robin across the downstream map subtasks before counting.
        SingleOutputStreamOperator<Tuple2<Integer, Integer>> mapDS = filterDS
                .rebalance()
                .map(new RichMapFunction<Long, Tuple2<Integer /* subtask index */, Integer>>() {
                    @Override
                    public Tuple2<Integer, Integer> map(Long value) throws Exception {
                        // The index of this parallel subtask identifies the "partition".
                        int idx = getRuntimeContext().getIndexOfThisSubtask();
                        // Emit (subtaskIndex, 1) so counts can be aggregated per subtask.
                        return Tuple2.of(idx, 1);
                    }
                });
        // Group by subtask index and count the elements seen by each subtask.
        SingleOutputStreamOperator<Tuple2<Integer, Integer>> result2 = mapDS.keyBy(i -> i.f0)
                .sum(1);

        // 4. sink
        //result1.print("没有重分区");
        result2.print("重分区");

        // 5. Submit and execute the streaming job.
        env.execute();
    }
}
