package com.abyss.transformation;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.operators.ReduceOperator;
import org.apache.flink.api.java.tuple.Tuple2;

/**
 * Reads the apache.log file and counts page views (PV) per IP address,
 * aggregating to a final result with the {@code reduce} operation.
 * Sample output:
 * (86.149.9.216,1)
 * (10.0.0.1,7)
 * (83.149.9.216,6)
 */
public class ReduceDemo {

    /** Default input file, used when no path is supplied on the command line. */
    private static final String DEFAULT_INPUT =
            "/Users/abyss/Dev/toys/flink/H-flink-learn/src/main/resources/apache.log";

    /**
     * Entry point: reads an Apache access log, maps each line to an
     * {@code (ip, 1)} tuple, groups by IP and sums the counts with
     * {@code reduce}, then prints one {@code (ip, count)} pair per IP.
     *
     * @param args optional; {@code args[0]} overrides the default log file path
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Batch execution environment
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Source: one String element per log line.
        //    Path is taken from args[0] when present instead of being hard-coded.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        DataSource<String> fileSource = env.readTextFile(inputPath);

        // 3. Map each line to an (ip, 1) tuple. The IP is the first
        //    whitespace-delimited token of an Apache access-log line.
        //    An anonymous MapFunction (not a lambda) is used so Flink can
        //    extract the Tuple2 type information despite erasure.
        MapOperator<String, Tuple2<String, Integer>> tuple = fileSource.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String value) throws Exception {
                String[] fields = value.split(" ");
                return Tuple2.of(fields[0], 1);
            }
        });

        // 4. Group by IP (tuple field 0) and sum the per-line counts.
        ReduceOperator<Tuple2<String, Integer>> result = tuple.groupBy(0).reduce(new ReduceFunction<Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
                // Keep the group key, add the partial counts.
                return Tuple2.of(value1.f0, value1.f1 + value2.f1);
            }
        });

        // 5. Sink: print() triggers job execution in the batch API.
        result.print();
    }
}
