package com.spx.chapter06;

import com.spx.chapter05.pojo.Event;
import com.spx.util.SampleDataUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * Demonstrates Flink's {@link org.apache.flink.api.common.functions.AggregateFunction}:
 * computes each user's average visit timestamp within 10-second event-time tumbling windows.
 *
 * Created by undeRdoG on 2022-05-02 20:30.
 * 凡心所向，素履以往，生如逆旅，一苇以航。
 */
public class AggregateFunctionTest {

    public static void main(String[] args) throws Exception {

        /*
        *  计算每个用户再10s内的访问时间的平均值
        * */
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        SingleOutputStreamOperator<Event> dataSource = env.fromCollection(SampleDataUtil.getSample())
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<Event>forBoundedOutOfOrderness(Duration.ZERO)
                                .withTimestampAssigner(new SerializableTimestampAssigner<Event>() {
                                    @Override
                                    public long extractTimestamp(Event element, long recordTimestamp) {
                                        return element.timestamp;
                                    }
                                })
                );

        WindowedStream<Event, String, TimeWindow> windowedStream = dataSource.keyBy(data -> data.user)
                .window(TumblingEventTimeWindows.of(Time.seconds(10)));

        SingleOutputStreamOperator<Tuple2<String, Long>> aggregate = windowedStream.aggregate(new org.apache.flink.api.common.functions.AggregateFunction<Event, Tuple3<String, Long, Long>, Tuple2<String, Long>>() {

            @Override
            public Tuple3<String, Long, Long> createAccumulator() {
                return Tuple3.of(null, 0L, 0L);
            }

            @Override
            public Tuple3<String, Long, Long> add(Event value, Tuple3<String, Long, Long> accumulator) {
                accumulator.setFields(value.user, accumulator.f1 + 1, accumulator.f2 + value.timestamp);
                return accumulator;
            }

            @Override
            public Tuple2<String, Long> getResult(Tuple3<String, Long, Long> accumulator) {
                return Tuple2.of(accumulator.f0, accumulator.f2 / accumulator.f1);
            }

            @Override
            public Tuple3<String, Long, Long> merge(Tuple3<String, Long, Long> a, Tuple3<String, Long, Long> b) {
                return null;
            }
        });


        aggregate.print("aggregate");

        env.execute();

    }
}
