package com.abyss.transformation;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.operators.ReduceOperator;
import org.apache.flink.api.java.tuple.Tuple2;
/**
 * Counts occurrences per IP in an Apache access log and reports the IPs with
 * the highest and lowest counts, using Flink DataSet aggregate operators.
 *
 * @author Abyss
 * @date 2020/10/4
 */
public class AggregateDemo {
    /**
     * Entry point.
     *
     * @param args optional; args[0] overrides the log file path (defaults to the
     *             bundled apache.log sample)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Source — allow the log path to be passed on the command line,
        //    falling back to the original hard-coded sample file.
        String path = args.length > 0
                ? args[0]
                : "/Users/abyss/Dev/toys/flink/H-flink-learn/src/main/resources/apache.log";
        DataSource<String> fileSource = env.readTextFile(path);

        // 3. Map each line to (ip, 1); the IP is the first whitespace-separated token
        MapOperator<String, Tuple2<String, Integer>> tuple = fileSource.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String value) throws Exception {
                String[] fields = value.split(" ");
                return Tuple2.of(fields[0], 1);
            }
        });

        // 4. Aggregate: total occurrences per IP
        AggregateOperator<Tuple2<String, Integer>> result1 = tuple.groupBy(0).sum(1);

        /*
         * Result looks like:
         * (86.149.9.216,1)
         * (10.0.0.1,7)
         * (83.149.9.216,6)
         */
        // 5. IP with the FEWEST occurrences.
        //    NOTE: min(1)/max(1) only guarantee correctness of the aggregated field;
        //    the IP in field 0 could come from a different record. minBy/maxBy
        //    return the entire winning tuple, which is what "which IP" requires.
        ReduceOperator<Tuple2<String, Integer>> minResult = result1.minBy(1);

        // 6. IP with the MOST occurrences
        ReduceOperator<Tuple2<String, Integer>> maxResult = result1.maxBy(1);

        // 7. Print (each print() call triggers its own job execution)
        result1.print();
        minResult.print();
        maxResult.print();
    }
}
