package com.zhang.hadoop.flink.test3;

import com.zhang.hadoop.flink.base.Event;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import sun.swing.plaf.synth.DefaultSynthStyle;

import java.util.ArrayList;
import java.util.List;

/**
 * @author: zhang yufei
 * @createTime:2022/5/15 17:03
 * @description:
 */
/**
 * Demonstrates Flink's simple keyed aggregations on a bounded stream of
 * page-visit events, contrasting {@code max} with {@code maxBy}:
 * <ul>
 *   <li>{@code max("timestamp")} — tracks the running maximum of the
 *       timestamp field only; the other fields of the emitted record stay
 *       those of the first event seen for the key.</li>
 *   <li>{@code maxBy("timestamp")} — emits the entire record that currently
 *       holds the largest timestamp, i.e. the user's most recent visit.</li>
 * </ul>
 */
public class TransformSimpleAggTest {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a deterministic order.
        env.setParallelism(1);

        DataStreamSource<Event> eventStream = env.fromCollection(sampleEvents());

        // Key by user, then aggregate: only the timestamp field is replaced
        // by the running maximum.
        eventStream
                .keyBy((KeySelector<Event, String>) e -> e.user)
                .max("timestamp")
                .print("max");

        // Key by user, then pick the whole record with the largest timestamp,
        // i.e. each user's latest visit.
        eventStream
                .keyBy(e -> e.user)
                .maxBy("timestamp")
                .print("maxBy");

        env.execute();
    }

    /** Builds the fixed demo data set of user page-visit events. */
    private static List<Event> sampleEvents() {
        List<Event> data = new ArrayList<>();
        data.add(new Event("huichao", "./yindao", 1000L));
        data.add(new Event("yanghui", "./yindao", 1000L));
        data.add(new Event("yanghui", "./gangmen", 2000L));
        data.add(new Event("yanghui", "./siwajiao", 3000L));
        data.add(new Event("yuping", "./siwajiao", 2000L));
        data.add(new Event("yuping", "./yindao", 4000L));
        data.add(new Event("yangdan", "./gangmen", 2000L));
        data.add(new Event("yangdan", "./yindao", 6000L));
        data.add(new Event("jingru", "./niaodao", 2000L));
        return data;
    }
}
