package com.epoint.flinkdemo.transform;

import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Random;

/**
 * Demo of Flink's keyed {@code fold} transformation: a 1-record-per-second
 * random fruit source is keyed to a single constant key, and a {@code HashMap}
 * accumulator maintains a running count per fruit name.
 *
 * @author liufl
 * @version [version, 21-4-8]
 */
public class FoldTest
{
    // BUGFIX: logger was created with KeybyTest.class (copy-paste from a sibling
    // demo), misattributing every log record; it must name this class.
    private static final Logger LOG = LoggerFactory.getLogger(FoldTest.class);

    // Candidate fruit names emitted by the demo source. These are runtime data
    // values (map keys in the output), so they are intentionally left as-is.
    private static final String[] TYPE = {"苹果", "梨", "西瓜", "葡萄", "火龙果"};

    /**
     * Builds and runs the demo job: source → keyBy(constant key) → fold → print.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Demo source named "order-info": emits one (fruit, 1) tuple per second
        // until the job is cancelled.
        DataStreamSource<Tuple2<String, Integer>> sourceStream = env
                .addSource(new SourceFunction<Tuple2<String, Integer>>()
                {
                    // volatile so the cancel() call from another thread is
                    // visible to the run() loop
                    private volatile boolean isRunning = true;

                    private final Random random = new Random();

                    @Override
                    public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
                        while (isRunning) {
                            Thread.sleep(1000);
                            ctx.collect(new Tuple2<>(TYPE[random.nextInt(TYPE.length)], 1));
                        }
                    }

                    @Override
                    public void cancel() {
                        isRunning = false;
                    }
                }, "order-info");

        // sourceStream.keyBy(0).print();

        // Group after keyBy
        sourceStream.keyBy(new KeySelector<Tuple2<String, Integer>, String>()
        {
            // Returning a constant key routes every record to the same partition
            @Override
            public String getKey(Tuple2<String, Integer> value) throws Exception {
                return "";
            }
        })
                // Use a HashMap as the running accumulator of per-fruit counts.
                // NOTE(review): fold() is deprecated in Flink (since 1.3+);
                // prefer KeyedStream.aggregate(AggregateFunction) in new code.
                .fold(new HashMap<String, Integer>(), new FoldFunction<Tuple2<String, Integer>, HashMap<String, Integer>>()
        {
            @Override
            public HashMap<String, Integer> fold(HashMap<String, Integer> accumulator, Tuple2<String, Integer> value)
                    throws Exception {
                // Increment the running count for this fruit; the updated map
                // itself is emitted downstream after every element.
                accumulator.put(value.f0, accumulator.getOrDefault(value.f0, 0) + value.f1);
                return accumulator;
            }
        }).print();

        env.execute();
    }
}
