package com.intmall.flink.operator;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the {@code reduce} transformation on a keyed stream:
 * counts clicks per user, then reduces all per-user counts down to the
 * single user with the highest running count.
 */
public class TransformReduceTest {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single task slot so printed output appears in a deterministic order.
        env.setParallelism(1);

        DataStreamSource<Event> clicks = env.fromElements(
                new Event("Mary", "./home", 1000L),
                new Event("Bob", "./cart", 2000L),
                new Event("Alice", "./cart", 2100L),
                new Event("Bob", "./product?id=1", 3000L),
                new Event("Bob", "./product?id=2", 3500L),
                new Event("Bob", "./product?id=3", 4000L),
                new Event("Bob", "./product?id=3", 3600L),
                new Event("Alice", "./product?id=1", 5100L),
                new Event("Mary", "./center", 6000L)
        );

        clicks
                // Each event contributes one click for its user.
                .map(click -> Tuple2.of(click.getUser(), 1L))
                // Lambdas erase generic info; declare the tuple type explicitly.
                .returns(Types.TUPLE(Types.STRING, Types.LONG))
                .keyBy(pair -> pair.f0)
                // Running click count per user.
                .sum(1)
                // Constant key funnels every partial result into one group,
                // so the following reduce sees all users' counts.
                .keyBy(pair -> true)
                // Keep whichever tuple carries the larger count — the
                // current "most active user".
                .reduce((left, right) -> left.f1 > right.f1 ? left : right)
                .print("reduce");

        env.execute();
    }
}
