
/*
 * Copyright © 2021 https://www.cestc.cn/ All rights reserved.
 */

package com.zx.learn.flink.transform;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates how {@code rebalance()} repartitions a stream round-robin so that
 * records are spread evenly across the parallel subtasks, avoiding data skew.
 *
 * <p>The job counts, per subtask index, how many records each parallel instance
 * receives — once without rebalancing (potentially skewed) and once after
 * {@code rebalance()} (evenly distributed) — and prints both for comparison.
 */
public class TransformationRebalance {

    /**
     * Maps each record to {@code (subtaskIndex, 1)} so that a downstream
     * keyBy/sum can count how many records each parallel subtask processed.
     */
    private static class SubtaskTagger extends RichMapFunction<Long, Tuple2<Integer, Integer>> {
        @Override
        public Tuple2<Integer, Integer> map(Long value) throws Exception {
            // getIndexOfThisSubtask(): index (0..parallelism-1) of the subtask handling this record.
            return Tuple2.of(getRuntimeContext().getIndexOfThisSubtask(), 1);
        }
    }

    public static void main(String[] args) throws Exception {
        // 1. Environment: fix parallelism to 3 (the job uses 3 task slots).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        // 2. Source: the sequence 0..100.
        DataStreamSource<Long> source = env.fromSequence(0, 100);

        // 3. Transformation: keep only values > 10, leaving 90 records (11..100).
        //    After filtering, records may be unevenly spread over the 3 partitions (data skew).
        DataStream<Long> filterDS = source.filter(w -> w > 10);

        // 3.1 Without rebalance: tag each record with its subtask index,
        //     then count records per subtask/partition.
        SingleOutputStreamOperator<Tuple2<Integer, Integer>> result =
                filterDS.map(new SubtaskTagger()).keyBy(t -> t.f0).sum(1);

        // 3.2 With rebalance: round-robin redistribution before the map
        //     evens out the per-partition record counts.
        SingleOutputStreamOperator<Tuple2<Integer, Integer>> result1 =
                filterDS.rebalance().map(new SubtaskTagger()).keyBy(t -> t.f0).sum(1);

        // 4. Sink: print both counts so the two distributions can be compared.
        //    (Without a sink, the first pipeline would still be added to the job
        //    graph but produce no observable output — wasted computation.)
        result.print("重分布之前");
        result1.print("重分布之后");

        // 5. Trigger job execution.
        env.execute();
    }
}
