package com.yeming.flink.practice.window

import com.yeming.flink.practice.source.StationLog
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

object TestAggregateFunctionByWindow {

  // Counts the number of log records per base station over five-second
  // tumbling processing-time windows, reading raw lines from a socket.
  def main(args: Array[String]): Unit = {

    // Set up the streaming environment; a single parallel task keeps the
    // printed output in one ordered stream for this demo.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Read raw comma-separated text lines from the socket source.
    import org.apache.flink.streaming.api.scala._
    val rawLines: DataStream[String] = env.socketTextStream("f1", 9999)

    // Parse each line into a StationLog record; the last two fields are
    // epoch timestamps / durations parsed as Long.
    val logs: DataStream[StationLog] = rawLines.map { line =>
      val fields = line.split(",").map(_.trim)
      new StationLog(fields(0), fields(1), fields(2), fields(3), fields(4).toLong, fields(5).toLong)
    }

    logs
      .map(log => (log.sid, 1))
      .keyBy(_._1) // must use a key selector here; a positional index (0) is not allowed
      .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
      .process(new ProcessWindowFunction[(String, Int), (String, Long), String, TimeWindow] {
        // Invoked once per key per window; emits (stationId, recordCount).
        override def process(key: String, context: Context, elements: Iterable[(String, Int)], out: Collector[(String, Long)]): Unit = {
          println("--------")
          out.collect((key, elements.size))
        }
      })
      .print()

    env.execute()
  }
}
