package com.learn.transform;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @create: 2023-04-18 10:55
 * @author: Mr.Du
 * --------------
 * @notes:
 * Reads the apache.log file, counts page views (PV) per client IP address,
 * and uses a reduce operation to aggregate the counts into a running total per IP.
 **/
public class ReduceDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Each line of apache.log begins with the client IP, delimited by spaces.
        DataStream<String> lines = env.readTextFile("./data/input/apache.log");

        // Map every non-blank line to (ip, 1).
        // Blank lines are filtered first: "".split(" ") yields [""] and would
        // otherwise emit a bogus ("", 1) record into the counts.
        // An anonymous MapFunction (rather than a lambda) is used deliberately:
        // with a lambda, Java generic-type erasure would force an explicit
        // .returns(...) type hint for the Tuple2 output.
        SingleOutputStreamOperator<Tuple2<String, Integer>> ipAndOne = lines
                .filter(line -> !line.isEmpty())
                .map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String line) throws Exception {
                        // The IP address is the first whitespace-delimited field.
                        String[] fields = line.split(" ");
                        return Tuple2.of(fields[0], 1);
                    }
                });

        // Key the stream by IP (f0), then reduce: fold each incoming element
        // into the running total for its key. Every updated total is emitted
        // downstream, so print() shows the count growing per IP.
        ipAndOne.keyBy(t -> t.f0)
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> acc,
                                                          Tuple2<String, Integer> next) throws Exception {
                        return Tuple2.of(acc.f0, acc.f1 + next.f1);
                    }
                }).print();

        // Use a descriptive job name (the original passed the empty string,
        // which makes the job hard to identify in the Flink dashboard).
        env.execute("Apache log IP PV count (reduce)");
    }
}
