package com.zhang.third.day10;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.util.Random;

/**
 * @title: 解决数据倾斜 (data skew mitigation via key salting + two-stage aggregation)
 * @author: zhang
 * @date: 2022/4/14 09:49
 */
public class DataSkew {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        env
                .addSource(new SourceFunction<Integer>() {
                    @Override
                    public void run(SourceContext<Integer> ctx) throws Exception {
                        // Emit a deliberately skewed stream: ten "1" events vs. one "2" event.
                        // No explicit watermarks are emitted: when this bounded source
                        // finishes, Flink sends a final Long.MAX_VALUE watermark, which
                        // fires all pending event-time windows.
                        for (int i = 0; i < 10; i++) {
                            ctx.collectWithTimestamp(1, 1000L);
                        }
                        ctx.collectWithTimestamp(2, 2000L);
                    }

                    @Override
                    public void cancel() {
                        // Bounded source that returns from run() on its own; nothing to stop.
                    }
                })
                // Append a random suffix ("salt") to each key so the hot key is
                // spread across the parallel subtasks instead of hitting one subtask.
                .map(new MapFunction<Integer, Tuple2<String, Integer>>() {
                    // One Random per function instance — the original allocated a
                    // new Random for every record, which is wasteful and can yield
                    // poorly distributed values when seeded in quick succession.
                    private final Random random = new Random();

                    @Override
                    public Tuple2<String, Integer> map(Integer value) throws Exception {
                        return Tuple2.of(value + "_" + random.nextInt(4), 1);
                    }
                })
                .keyBy(r -> r.f0)
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                // Stage 1: pre-aggregate the salted keys inside each window.
                .aggregate(new CountAgg())
                // Strip the random suffix to recover the original key.
                .map(new MapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(Tuple2<String, Integer> value) throws Exception {
                        return Tuple2.of(value.f0.split("_")[0], value.f1);
                    }
                })
                // Stage 2: re-key by the real key and combine the partial counts.
                .keyBy(r -> r.f0)
                .sum("f1")
                .print();

        env.execute();
    }

    /**
     * Incremental count aggregator over (key, count) pairs.
     * The accumulator carries the key through so the window result keeps it.
     */
    public static class CountAgg implements AggregateFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>> {
        @Override
        public Tuple2<String, Integer> createAccumulator() {
            return Tuple2.of("", 0);
        }

        @Override
        public Tuple2<String, Integer> add(Tuple2<String, Integer> value, Tuple2<String, Integer> accumulator) {
            accumulator.f0 = value.f0;
            accumulator.f1 = value.f1 + accumulator.f1;
            return accumulator;
        }

        @Override
        public Tuple2<String, Integer> getResult(Tuple2<String, Integer> accumulator) {
            return accumulator;
        }

        @Override
        public Tuple2<String, Integer> merge(Tuple2<String, Integer> a, Tuple2<String, Integer> b) {
            // BUG FIX: the original returned null, violating the AggregateFunction
            // contract and causing an NPE if Flink ever merges window state
            // (e.g. with session windows). Combine the two partial counts and
            // keep whichever side already holds a real key.
            return Tuple2.of(a.f0.isEmpty() ? b.f0 : a.f0, a.f1 + b.f1);
        }
    }
}
