package com.j.lemon.learn.flink;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @author lijunjun
 */
public class FlinkExample {

    /**
     * Builds and runs a small word-count style streaming job: three in-memory
     * {@code WordCount} records are mapped to (word, count) tuples, summed per
     * word, filtered to totals above 4, and printed. A custom accumulator
     * ("ccc") counts how many records passed through the flat-map stage and is
     * printed after the job finishes.
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Flush network buffers quickly so results appear with low latency.
        env.setBufferTimeout(2);

        // Source: three fixed in-memory records.
        DataStream<WordCount> source = env.fromElements(
                new WordCount("zhangsan", 1),
                new WordCount("lisi", 2),
                new WordCount("lisi", 3));

        // Convert each WordCount to a (word, count) tuple; the rich function
        // registers an IntCounter accumulator to count processed records.
        DataStream<Tuple2<String, Integer>> tuples = source
                .flatMap(new RichFlatMapFunction<WordCount, Tuple2<String, Integer>>() {
                    private final IntCounter processedRecords = new IntCounter();

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // Register once per task instance, before any records arrive.
                        getRuntimeContext().addAccumulator("ccc", this.processedRecords);
                    }

                    @Override
                    public void flatMap(WordCount wc, Collector<Tuple2<String, Integer>> out) throws Exception {
                        processedRecords.add(1);
                        out.collect(new Tuple2<>(wc.getWord(), wc.getCount()));
                    }
                })
                .setParallelism(1);

        // Key by the word field, running-sum the counts, then keep only
        // aggregated totals strictly greater than 4.
        DataStream<Tuple2<String, Integer>> filtered = tuples
                .keyBy((KeySelector<Tuple2<String, Integer>, Object>) tuple -> tuple.f0)
                .sum(1)
                .setParallelism(1)
                .filter((FilterFunction<Tuple2<String, Integer>>) tuple -> tuple.f1 > 4)
                .setParallelism(2);

        filtered.print();

        // Execute the job and read back the accumulator's final value.
        JobExecutionResult result = env.execute();
        Object processedTotal = result.getAccumulatorResult("ccc");
        System.out.println(processedTotal);
    }
}
