package com.abyss.transformation;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.operators.ReduceOperator;
import org.apache.flink.api.java.tuple.Tuple2;
/**
 * Demo of {@code minBy}/{@code maxBy} on a grouped-and-summed DataSet:
 * finds the IP address with the highest and the lowest request count
 * in an Apache access log (the IP is assumed to be the first
 * whitespace-separated field of each line).
 */
public class MinByMaxByDemo {

    /** Input used when no path is supplied on the command line. */
    private static final String DEFAULT_INPUT =
            "/Users/abyss/Dev/toys/flink/H-flink-learn/src/main/resources/apache.log";

    /**
     * Entry point.
     *
     * @param args optional; {@code args[0]} overrides the default log-file path
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Source — path is parameterized; falls back to the original default
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        DataSource<String> fileSource = env.readTextFile(inputPath);

        // 3. Map each line to (ip, 1). Blank lines are filtered out first:
        //    "".split(" ") yields [""], which would otherwise count an
        //    empty-string "IP" into the aggregation.
        MapOperator<String, Tuple2<String, Integer>> tuple = fileSource
                .filter(line -> line != null && !line.trim().isEmpty())
                .map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String value) throws Exception {
                        String[] fields = value.split(" ");
                        return Tuple2.of(fields[0], 1);
                    }
                });

        // 4. Aggregate: per-IP request count (group by field 0, sum field 1)
        AggregateOperator<Tuple2<String, Integer>> ipWithCount = tuple.groupBy(0).sum(1);

        // 5. minBy(1): tuple with the smallest count (least-seen IP)
        ReduceOperator<Tuple2<String, Integer>> min = ipWithCount.minBy(1);

        // 6. maxBy(1): tuple with the largest count (most-seen IP)
        ReduceOperator<Tuple2<String, Integer>> max = ipWithCount.maxBy(1);

        // 7. print() triggers execution of each lazily-built plan
        min.print();
        max.print();
    }
}
