package com.desheng.bigdata.flink.window

import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.scala.{DataStream, KeyedStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector


/**
  * 全窗口聚合函数
  *     将窗口所有的数据都汇聚齐之后再进行计算。
  *    ProcessWindowFunction 就是一个全窗口函数。
  *
  *    keyBy(position)
  *    keyBy(field)
  *         前两个keyby的返回值为[T, Tuple]
  *    keyBy(fun: T => K)
  *         该key的返回值为[T, K]
  */
object _04FullWindowOps {
    /**
      * Word-count demo using a full-window function: reads whitespace-separated
      * words from a socket, keys by word, and sums counts per 4-second
      * processing-time tumbling window. A ProcessWindowFunction buffers ALL
      * elements of the window for a key and is invoked once when the window fires.
      */
    def main(args: Array[String]): Unit = {
        val env = StreamExecutionEnvironment.getExecutionEnvironment
        // Use processing time as the time semantics for window assignment.
        env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)

        val lines: DataStream[String] = env.socketTextStream("bigdata01", 9999)

        // keyBy with a key-selector function produces KeyedStream[(String, Int), String],
        // giving a strongly typed key (String) to the ProcessWindowFunction below
        // (position/field-based keyBy would instead yield a Tuple-typed key).
        val keyed: KeyedStream[(String, Int), String] = lines.flatMap(_.split("\\s+"))
            .map((_, 1))
            .keyBy(kv => kv._1)

        keyed.timeWindow(Time.seconds(4))
            .process(new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
                override def process(key: String,
                                     context: Context,
                                     // all elements for this key in this window
                                     elements: Iterable[(String, Int)],
                                     out: Collector[(String, Int)]): Unit = {
                    // Sum the per-word counts functionally instead of mutating a var.
                    val sum = elements.iterator.map(_._2).sum
                    // Emit an explicit tuple: `out.collect(key, sum)` relied on
                    // Scala 2 argument auto-tupling, which is deprecated.
                    out.collect((key, sum))
                }
            }).print()
        env.execute("_04FullWindowOps")
    }
}
