package com.zt.bigdata.flink.stream

import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time

/**
  * Windowed word count over a socket text stream.
  *
  * Demonstrates three equivalent per-window aggregation strategies
  * (fold, built-in sum, explicit AggregateFunction); the
  * AggregateFunction variant is the one left active.
  *
  * Run with: --host <hostname> --port <port>
  */
object WindowWordCount {

  def main(args: Array[String]): Unit = {

    import org.apache.flink.api.java.utils.ParameterTool
    // Source host/port come from the command line (--host / --port).
    val parameters = ParameterTool.fromArgs(args)
    val host = parameters.get("host")
    val port = parameters.getInt("port")

    val env =
      StreamExecutionEnvironment.getExecutionEnvironment
    // Expose the CLI parameters globally (visible in the web UI and
    // retrievable from operators via the runtime context).
    env.getConfig.setGlobalJobParameters(parameters)
    val text = env.socketTextStream(host, port)
    val counts = text.flatMap {
      // Tokenize on non-word characters and drop empty tokens.
      _.toLowerCase.split("\\W+") filter (_.nonEmpty)
    }
      .map((_, 1L)) // uppercase L: a lowercase 'l' suffix is easily misread as '1'
      .keyBy(0)
      .timeWindow(Time.seconds(5))
      // Alternative 1: fold with a tuple accumulator.
      //      .fold(("", 0L))((acc, event) => {
      //        (event._1, event._2 + acc._2)
      //      })
      // Alternative 2: built-in sum on the count field.
      //      .sum(1)
      // Alternative 3 (active): explicit AggregateFunction.
      .aggregate(new AggregateFunction[(String, Long), (String, Long), (String, Long)] {
        // Accumulator starts as (empty word, zero count).
        override def createAccumulator(): (String, Long) = ("", 0L)

        // Fold one element into the accumulator; within a keyed window the
        // word is constant, so carrying value._1 forward is safe.
        override def add(value: (String, Long), accumulator: (String, Long)): (String, Long) =
          (value._1, accumulator._2 + value._2)

        override def getResult(accumulator: (String, Long)): (String, Long) = accumulator

        // Combine two partial accumulators (needed for merging windows,
        // e.g. session windows).
        override def merge(a: (String, Long), b: (String, Long)): (String, Long) =
          (a._1, a._2 + b._2)
      })
    counts.print()

    env.execute("Window Stream WordCount")
  }

}
