package com.chb.flink.window


import com.chb.flink.source.{MyCustomerSource, StationLog}
import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

object TestAggregatFunctionByWindow {

    // Every 3 seconds, count how many log records each base station produced
    // over the last 5 seconds (sliding processing-time window).
    def main(args: Array[String]): Unit = {
        val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
        // Brings the implicit TypeInformation instances required by the Scala DataStream API.
        import org.apache.flink.streaming.api.scala._

        // Ingest log records from the custom source.
        val logStream = env.addSource(new MyCustomerSource)

        // (stationId, 1) pairs, keyed by station, windowed 5s/3s.
        // MyAggrateFunction does the incremental counting as records arrive;
        // MyWindowFunction attaches the station key when the window fires.
        val keyedCounts = logStream
            .map(record => (record.sid, 1))
            .keyBy(_._1)
            .window(SlidingProcessingTimeWindows.of(Time.seconds(5), Time.seconds(3)))
            .aggregate(new MyAggrateFunction(), new MyWindowFunction())

        keyedCounts.print()

        env.execute()
    }

    /**
     * Incremental counter: folds each (stationId, 1) record into a running
     * Long count held as the window's accumulator.
     */
    class MyAggrateFunction() extends AggregateFunction[(String, Int), Long, Long] {
        /** A fresh window starts counting from zero. */
        override def createAccumulator(): Long = 0L

        /** Adds one record's weight (the tuple's second field) to the running count. */
        override def add(in: (String, Int), acc: Long): Long = in._2 + acc

        /** The emitted window result is simply the accumulated count. */
        override def getResult(acc: Long): Long = acc

        /** Combines two partial counts when Flink merges accumulators. */
        override def merge(acc: Long, acc1: Long): Long = acc1 + acc
    }

    /**
     * Wraps the pre-aggregated count with its window key. Flink invokes the
     * AggregateFunction's getResult first when the window closes, then calls
     * apply with that single value.
     */
    class MyWindowFunction() extends WindowFunction[Long, (String, Long), String, TimeWindow] {
        override def apply(key: String, window: TimeWindow, input: Iterable[Long], out: Collector[(String, Long)]): Unit = {
            // `input` carries exactly one element: the count produced by getResult.
            val count = input.iterator.next()
            out.collect((key, count))
        }
    }
}
