package org.example.flink.operation;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates Flink physical partitioning strategies on a DataStream.
 *
 * <p>Reads lines from a local text file, tags each line with the index of the map
 * subtask that processed it, then redistributes the records downstream with a
 * custom partitioner. The commented-out lines show the built-in alternatives
 * (global, rebalance, rescale, shuffle, broadcast, forward, keyBy) — uncomment
 * one at a time to compare their routing behavior in the printed output.
 */
public class Hello17Partitioner {
    public static void main(String[] args) throws Exception {
        // Create the stream execution environment.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Read the input data. Parallelism 1 so a single source subtask reads the file.
        DataStreamSource<String> source = environment.readTextFile("data/partition.txt").setParallelism(1);
        // Tag each record with its 1-based map-subtask index so the partitioning
        // effect is visible in the printed output. Parallelism 2 (different from the
        // source and the sink) forces a redistribution on both sides of this map.
        SingleOutputStreamOperator<String> upperStream = source.map(new RichMapFunction<String, String>() {
            @Override
            public String map(String value) throws Exception {
                return "task[" + (getRuntimeContext().getIndexOfThisSubtask() + 1) + "]value[" + value + "]";
            }
        }).setParallelism(2);

        // Print the data — each line below demonstrates one built-in partitioner.
        // upperStream.global().print("GlobalPartitioner").setParallelism(4);
        // upperStream.rebalance().print("RebalancePartitioner").setParallelism(3);
        //upperStream.rescale().print("RescalePartitioner").setParallelism(4);
        // upperStream.shuffle().print("ShufflePartitioner").setParallelism(4);
        // upperStream.broadcast().print("BroadcastPartitioner").setParallelism(2);
        // upperStream.forward().print("ForwardPartitioner").setParallelism(2);
        // upperStream.keyBy(word -> word).print("KeyGroupStreamPartitioner").setParallelism(4);
        // Custom partitioner — long form with explicit Partitioner/KeySelector
        // anonymous classes, equivalent to the lambda version used below.
        // upperStream.partitionCustom(new Partitioner<String>() {
        //     @Override
        //     public int partition(String key, int numPartitions) {
        //         return numPartitions - 1;
        //     }
        // }, new KeySelector<String, String>() {
        //     @Override
        //     public String getKey(String value) throws Exception {
        //         return value;
        //     }
        // }).print("CustomPartitioner").setParallelism(4);
        // Custom partitioner: key selector is identity, and every key is routed to
        // the last downstream channel (numPartitions - 1), so with sink parallelism 4
        // all output appears on subtask 4.
        upperStream.partitionCustom((k, n) -> n - 1, v -> v).print("CustomPartitioner").setParallelism(4);

        // Submit and run the job (blocks until the bounded source is exhausted).
        environment.execute();

    }
}
