package com.abyss.transformation;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.operators.PartitionOperator;
import org.apache.flink.api.java.tuple.Tuple2;

/**
 * Demonstrates range partitioning of a DataSet by tuple field value,
 * then prints each element together with the subtask (partition) index it landed on.
 */
public class PartitionByRangeDemo {
    public static void main(String[] args) throws Exception {
        // 1. Set up the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Source: the sequence 1..100, wrapped as (value, 1) tuples so we
        //    have a keyed field (f0) to partition on.
        DataSource<Long> sourceSeq = env.generateSequence(1, 100);
        MapOperator<Long, Tuple2<Long, Integer>> source = sourceSeq.map(new MapFunction<Long, Tuple2<Long, Integer>>() {
            @Override
            public Tuple2<Long, Integer> map(Long value) throws Exception {
                return Tuple2.of(value, 1);
            }
        });

        // 3. Range-partition on field 0 (the sequence value). Elements with
        //    adjacent values end up in the same partition.
        PartitionOperator<Tuple2<Long, Integer>> partitionedDataSet = source.partitionByRange(0);

        // 4. Replace the count field with the subtask index so the output shows
        //    which partition each value was assigned to.
        //    BUG FIX: map over partitionedDataSet, not source — mapping over
        //    source would silently discard the partitioning from step 3.
        MapOperator<Tuple2<Long, Integer>, Tuple2<Long, Integer>> map = partitionedDataSet.map(new RichMapFunction<Tuple2<Long, Integer>, Tuple2<Long, Integer>>() {
            @Override
            public Tuple2<Long, Integer> map(Tuple2<Long, Integer> value) throws Exception {
                return Tuple2.of(value.f0, getRuntimeContext().getIndexOfThisSubtask());
            }
        });

        // 5. Print (value, partitionIndex) pairs; print() triggers execution.
        map.print();
    }
}
