package com.shujia.flink.window

import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow

/**
 * Flink streaming demo: word counting over count-based windows.
 *
 * Reads comma-separated lines from a Kafka topic, splits them into words,
 * and emits per-word counts using a sliding count window.
 */
object Demo2CountWindow {

  def main(args: Array[String]): Unit = {
    // Flink streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Kafka source: consume plain-string records from the "lines" topic.
    val kafkaSource: KafkaSource[String] = KafkaSource
      .builder[String]
      .setBootstrapServers("master:9092")               // Kafka broker list
      .setTopics("lines")                               // topic to consume
      .setGroupId("shujia")                             // consumer group: a record is read once per group
      .setStartingOffsets(OffsetsInitializer.latest())  // start from the newest offsets only
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build

    // Attach the Kafka source; no event-time watermarks are needed here
    // because count windows fire on element counts, not on time.
    val lines: DataStream[String] =
      env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "Kafka Source")

    // Split each line into words and pair every word with an initial count of 1.
    val pairs: DataStream[(String, Int)] = lines
      .flatMap(_.split(","))
      .map((_, 1))

    /**
     * Count window: triggered by the number of elements per key, not by time.
     *   .countWindow(10)    — tumbling count window of 10 elements
     *   .countWindow(10, 5) — sliding count window: fires every 5 elements over the last 10
     */
    val windowed: WindowedStream[(String, Int), String, GlobalWindow] = pairs
      .keyBy(_._1)
      .countWindow(10, 5)

    // Sum the counts (tuple field index 1) within each window and print the results.
    windowed
      .sum(1)
      .print()

    env.execute()
  }

}
