package com.hngy.java.stream.transformaction;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Arrays;

/**
 * Demonstrates the physical partitioning strategies available on a Flink
 * {@code DataStream}: shuffle, rebalance, rescale, broadcast, and a custom
 * partitioner ({@code MyPartitionerJava}).
 *
 * <p>Each demo method maps the source through an identity {@code MapFunction}
 * with parallelism 2, applies one partitioning strategy, and prints with
 * parallelism 4, so the redistribution between 2 upstream and 4 downstream
 * subtasks is visible in the output.
 */
public class StreamPartitionOpJava {

    public static void main(String[] args) throws Exception{
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // NOTE: by default the parallelism of Flink operators equals the
        // number of CPU cores on the current machine.
        // fromCollection always runs with parallelism 1 (see its source).
        DataStreamSource<Integer> text = env.fromCollection(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));

        // shuffle partitioning: random distribution downstream
        //shuffleOp(text);

        // rebalance partitioning: round-robin distribution downstream
        //rebalanceOp(text);

        // rescale partitioning: round-robin within local subtask groups
        //rescaleOp(text);

        // broadcast partitioning: every downstream subtask receives every
        // record — this prints 40 lines in total because print has parallelism 4
        //broadcastOp(text);

        // Custom partitioning: route records by parity (odd/even).
        // Although print has parallelism 4, the custom partitioner only
        // ever sends data to 2 of the 4 parallel subtasks.
        customPartitionOp(text);

        env.execute("StreamPartitionOpJava");
    }

    /**
     * Routes records through {@link MyPartitionerJava} keyed on the record
     * value itself, then prints with parallelism 4.
     */
    private static void customPartitionOp(DataStreamSource<Integer> text) {
        text.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer integer) throws Exception {
                return integer; // identity map, only used to set parallelism
            }
        }).setParallelism(2)
                .partitionCustom(new MyPartitionerJava(), new KeySelector<Integer, Integer>() {
                    @Override
                    public Integer getKey(Integer integer) throws Exception {
                        return integer; // key is the record value itself
                    }
                })
                .print()
                .setParallelism(4);
    }

    /** Broadcasts every record to all downstream (print) subtasks. */
    private static void broadcastOp(DataStreamSource<Integer> text) {
        text.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer integer) throws Exception {
                return integer; // identity map, only used to set parallelism
            }
        }).setParallelism(2)
                .broadcast()
                .print()
                .setParallelism(4);
    }

    /** Redistributes records round-robin within local subtask groups. */
    private static void rescaleOp(DataStreamSource<Integer> text) {
        text.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer integer) throws Exception {
                return integer; // identity map, only used to set parallelism
            }
        }).setParallelism(2)
                .rescale()
                .print()
                .setParallelism(4);
    }

    /** Redistributes records round-robin across all downstream subtasks. */
    private static void rebalanceOp(DataStreamSource<Integer> text) {
        text.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer integer) throws Exception {
                return integer; // identity map, only used to set parallelism
            }
        }).setParallelism(2)
                .rebalance()
                .print()
                .setParallelism(4);
    }

    /** Redistributes records randomly (uniform) across downstream subtasks. */
    private static void shuffleOp(DataStreamSource<Integer> text) {
        text.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer num) throws Exception {
                return num; // identity map, only used to set parallelism
            }
        }).setParallelism(2)
                .shuffle()
                .print()
                .setParallelism(4);
    }
}
