package com.zhang.hadoop.flink.test3;

import com.zhang.hadoop.flink.base.Event;
import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;

import java.util.ArrayList;
import java.util.List;

/**
 * Demonstrates Flink's physical partitioning strategies on a DataStream:
 * shuffle (random), rebalance (round-robin), rescale (group-local round-robin),
 * broadcast, global, and a custom {@link Partitioner}.
 *
 * @author: zhang yufei
 * @createTime:2022/5/21 16:32
 * @description:
 */
public class TransformPartitionTest {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Source parallelism 1; each sink below overrides its own parallelism to 4
        // so the partitioning effect is visible in the printed subtask indices.
        env.setParallelism(1);

        List<Event> events = new ArrayList<>();
        events.add(new Event("huichao", "./yindao", 1000L));
        events.add(new Event("yanghui", "./yindao", 1000L));
        events.add(new Event("yanghui", "./gangmen", 2000L));
        events.add(new Event("yanghui", "./siwajiao", 3000L));
        events.add(new Event("yuping", "./siwajiao", 2000L));
        events.add(new Event("yuping", "./yindao", 4000L));
        events.add(new Event("yangdan", "./gangmen", 2000L));
        events.add(new Event("yangdan", "./yindao", 6000L));
        events.add(new Event("jingru", "./niaodao", 2000L));
        DataStreamSource<Event> stream = env.fromCollection(events);

        // 1. Random partitioning: records distributed uniformly at random.
        stream.shuffle().print("随机分区").setParallelism(4);

        // 2. Round-robin partitioning across all downstream subtasks.
        stream.rebalance().print("轮询分区").setParallelism(4);

        // 3. Rescale: round-robin only within each local group of downstream subtasks
        //    (here 2 source subtasks each feed 2 of the 4 printers).
        env.addSource(new RichParallelSourceFunction<Integer>() {

            @Override
            public void run(SourceContext<Integer> sourceContext) throws Exception {
                for (int i = 1; i <= 8; i++) {
                    // Split 1..8 by parity across the two source subtasks:
                    // subtask 0 emits the even numbers, subtask 1 the odd ones.
                    if (i % 2 == getRuntimeContext().getIndexOfThisSubtask()) {
                        sourceContext.collect(i);
                    }
                }
            }

            @Override
            public void cancel() {
                // Bounded source: run() terminates on its own, nothing to interrupt.
            }
        }).setParallelism(2).rescale().print("重缩放分区").setParallelism(4);

        // 4. Broadcast: every record is replicated to all downstream subtasks.
        stream.broadcast().print("广播").setParallelism(4);

        // 5. Global: every record is routed to downstream subtask 0 only.
        stream.global().print("全局分区").setParallelism(4);

        // 6. Custom partitioning: even keys to partition 0, odd keys to partition 1.
        env.fromElements(1, 2, 3, 4, 5, 6, 7, 8)
                .partitionCustom(new Partitioner<Integer>() {

                    @Override
                    public int partition(Integer key, int numPartitions) {
                        return key % 2;
                    }
                }, new KeySelector<Integer, Integer>() {
                    @Override
                    public Integer getKey(Integer integer) throws Exception {
                        // BUG FIX: previously returned null, so partition() received a
                        // null key and "key % 2" threw a NullPointerException at runtime.
                        // The element itself is the key.
                        return integer;
                    }
                }).print().setParallelism(4);

        env.execute();
    }
}
