package com.shujia.flink.core

import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

import scala.collection.mutable

object Demo8KeyByProcess {
  def main(args: Array[String]): Unit = {
    // Flink streaming entry point: running word count over a socket text source.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // One message per line from host "master", port 8888.
    val linesDS: DataStream[String] = env.socketTextStream("master", 8888)

    // Each line is a comma-separated list of words.
    val wordsDS: DataStream[String] = linesDS.flatMap(_.split(","))

    val kvDS: DataStream[(String, Int)] = wordsDS.map((_, 1))

    // keyBy routes all records with the same word to the same subtask.
    // This partitioning is what makes per-key (keyed) state possible in
    // the process function below.
    val keyByDS: KeyedStream[(String, Int), String] = kvDS.keyBy(_._1)

    /**
     * process after keyBy: a KeyedProcessFunction has access to the
     * current key, timers, and Flink-managed keyed state.
     */
    val countDS: DataStream[(String, Int)] = keyByDS
      .process(new KeyedProcessFunction[String, (String, Int), (String, Int)] {
        // FIX: the original kept counts in a plain mutable.HashMap held in
        // the function instance. That map is not Flink-managed state: it is
        // never checkpointed, so every count is lost on failure/restart, and
        // it cannot be spilled to a state backend. Keyed ValueState is the
        // correct mechanism — Flink scopes it to the current key
        // automatically and includes it in checkpoints and savepoints.
        private var countState: ValueState[Int] = _

        override def open(parameters: Configuration): Unit = {
          countState = getRuntimeContext.getState(
            new ValueStateDescriptor[Int]("count", classOf[Int])
          )
        }

        /**
         * Invoked once per input record; may emit any number of records.
         *
         * @param value one (word, 1) record
         * @param ctx   context object: current key, timers, side outputs
         * @param out   collector used to emit results downstream
         */
        override def processElement(value: (String, Int),
                                    ctx: KeyedProcessFunction[String, (String, Int), (String, Int)]#Context,
                                    out: Collector[(String, Int)]): Unit = {
          // Key of the record currently being processed.
          val key: String = ctx.getCurrentKey

          // ValueState.value() yields null when nothing has been stored for
          // this key yet; Scala unboxes that to 0 for Int, so the count
          // starts at zero without an explicit check.
          val newCount: Int = countState.value() + 1
          countState.update(newCount)

          // Emit the running total for this word.
          out.collect((key, newCount))
        }
      })

    countDS.print()

    env.execute()
  }

}
