package flink.api_study.window;

import flink.api_study.source.Person;
import flink.api_study.source.PersonSource;
import org.apache.commons.collections.keyvalue.TiedMapEntry;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.sql.Timestamp;
import java.util.Date;

/**
 * Flink demo: sliding processing-time window aggregation over a stream of {@code Person} events.
 *
 * <p>Events from {@code PersonSource} are keyed by job, collected into 10-second windows that
 * slide every 3 seconds, and reduced into a single summary {@code Person} per key per window:
 * the uuids seen in the window are concatenated, the age is the maximum observed, and the
 * timestamp records when the aggregate was produced. Both the raw stream and the windowed
 * result are printed to stdout.
 */
public class WindowTest {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallel task so the printed output is in one ordered sequence,
        // which makes the demo easy to read.
        env.setParallelism(1);

        DataStream<Person> stream = env.addSource(new PersonSource());
        // Print the raw input so it can be compared against the windowed output below.
        stream.print();

        stream.keyBy(Person::getJob)
                // 10s window length, 3s slide: each element belongs to multiple
                // overlapping windows, so it can appear in several printed aggregates.
                .window(SlidingProcessingTimeWindows.of(Time.seconds(10), Time.seconds(3)))
                // In ReduceFunction the first argument is the running aggregate for the
                // window, the second is the newly arrived element.
                .reduce((aggregate, incoming) -> {
                    Person summary = new Person();
                    // Accumulate every uuid seen in the window as a comma-joined list.
                    summary.setUuid(aggregate.getUuid() + "," + incoming.getUuid());
                    // Job is the key field, so it is identical on both inputs.
                    summary.setJob(aggregate.getJob());
                    summary.setAge(Math.max(aggregate.getAge(), incoming.getAge()));
                    // Stamp the aggregate with the wall-clock time it was produced;
                    // System.currentTimeMillis() avoids the legacy java.util.Date detour.
                    summary.setTimestamp(new Timestamp(System.currentTimeMillis()));
                    return summary;
                })
                .print();

        env.execute();
    }
}
