package com.atguigu.Flink.datastream.transform;

import com.atguigu.Flink.POJO.Event;
import com.atguigu.Flink.POJO.WordCount;
import com.atguigu.Flink.function.ClickSource;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates keyed rolling aggregation with {@code reduce}:
 * first a per-user running count, then a global "current maximum"
 * obtained by routing every record to a single key.
 */
public class Flink03_ReduceAgg {
    public static void main(String[] args) {
        // Single-parallelism local environment so the printed output is easy to follow.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: synthetic click events from the custom ClickSource.
        DataStreamSource<Event> eventStream = env.addSource(new ClickSource());
        eventStream.print("input");

        // Per-user running count: map each event to (user, 1), key by the word,
        // and fold counts together with a rolling reduce.
        SingleOutputStreamOperator<WordCount> countsPerUser = eventStream
                .map(event -> new WordCount(event.getUser(), 1L))
                .keyBy(WordCount::getWord)
                .reduce(new ReduceFunction<WordCount>() {
                    @Override
                    public WordCount reduce(WordCount acc, WordCount next) throws Exception {
                        System.out.println("reduce....");
                        return new WordCount(acc.getWord(), acc.getCount() + next.getCount());
                    }
                });
        countsPerUser.print("reduce");

        // Global maximum: send every record to the same key (constant true),
        // then keep whichever record carries the larger count.
        // NOTE: on a tie the later record wins, matching the original comparison.
        countsPerUser
                .keyBy(wc -> true)
                .reduce(new ReduceFunction<WordCount>() {
                    @Override
                    public WordCount reduce(WordCount best, WordCount candidate) throws Exception {
                        return best.getCount() > candidate.getCount() ? best : candidate;
                    }
                })
                .print("max");

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
