package com.ww.flink

import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.environment.CheckpointConfig
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}

/**
 * Sliding-window word count over a socket text stream:
 * every 2 seconds, count the words seen in the last 5 seconds.
 */
object Flink_try12_time_window {
  // Sample event type; not used by this job (kept for interface compatibility).
  case class CarInfo(carId: String, speed: Long)

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Read raw lines from the socket, split into words, attach a count of 1,
    // and key the stream by the word itself.
    val lines: DataStream[String] = env.socketTextStream("node01", 8888)
    val countsByWord = lines
      .flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .keyBy(pair => pair._1)

    // 5-second window evaluated every 2 seconds. The ReduceFunction sums the
    // counts incrementally as elements arrive; the WindowFunction then emits
    // the single pre-aggregated record per key when the window fires.
    countsByWord
      .timeWindow(Time.seconds(5), Time.seconds(2))
      .reduce(
        new ReduceFunction[(String, Int)] {
          override def reduce(a: (String, Int), b: (String, Int)): (String, Int) =
            (a._1, a._2 + b._2)
        },
        new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
          override def apply(key: String,
                             window: TimeWindow,
                             input: Iterable[(String, Int)],
                             out: Collector[(String, Int)]): Unit = {
            // The reduce step leaves exactly one element per key/window,
            // so the window's input is never empty here.
            out.collect(input.head)
          }
        })
      .print()

    env.execute()
  }
}
