package com.jaymin.flink.datastream.partitioner;

import com.jaymin.flink.datastream.source.AccessSourceV2;
import com.jaymin.flink.datastream.transformation.Access;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author jaymin
 * @since 2024/4/5 20:41
 */
/**
 * Demonstrates routing records with a custom partitioner ({@code PKPartitioner}).
 *
 * <p>Pipeline: source → key each {@code Access} by its domain as a
 * {@code Tuple2<domain, access>} → custom-partition on the domain →
 * unwrap back to {@code Access} → print.
 *
 * <p>Anonymous {@link MapFunction} classes are used deliberately instead of
 * lambdas: Flink's type extraction can lose the {@code Tuple2} generic
 * parameters through a lambda due to erasure.
 */
public class PartitionerApp {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        final DataStreamSource<Access> source = env.addSource(new AccessSourceV2());
        // Show the effective parallelism of the source operator.
        System.out.println(source.getParallelism());

        // Wrap each record as (domain, record) so the partitioner can key on domain.
        source.map(new MapFunction<Access, Tuple2<String, Access>>() {
                    @Override
                    public Tuple2<String, Access> map(Access access) throws Exception {
                        return Tuple2.of(access.getDomain(), access);
                    }
                })
                // Cast disambiguates the partitionCustom overload taking a KeySelector.
                .partitionCustom(new PKPartitioner(), (KeySelector<Tuple2<String, Access>, String>) pair -> pair.f0)
                // Drop the routing key; downstream only needs the Access payload.
                .map(new MapFunction<Tuple2<String, Access>, Access>() {
                    @Override
                    public Access map(Tuple2<String, Access> pair) throws Exception {
                        return pair.f1;
                    }
                })
                .print();

        env.execute("PartitionerApp");
    }
}
