package com.shujia.flink.state

import org.apache.flink.api.common.functions.{ReduceFunction, RuntimeContext}
import org.apache.flink.api.common.state.{ReducingState, ReducingStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

/**
 * Flink demo: per-key running word count implemented with keyed
 * [[ReducingState]] inside a [[KeyedProcessFunction]].
 *
 * Pipeline: socket source -> split on "," -> (word, 1) -> keyBy(word)
 * -> reduce counts in keyed state -> print.
 */
object Demo5ReducingState {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Read raw lines from a socket source (host "master", port 8888).
    val linesDS: DataStream[String] = env.socketTextStream("master", 8888)

    // Split each line on commas and pair every word with an initial count of 1.
    val kvDS: DataStream[(String, Int)] = linesDS.flatMap(_.split(",")).map((_, 1))

    // Partition the stream by word so that state is scoped per key.
    val keyByDS: KeyedStream[(String, Int), String] = kvDS.keyBy(_._1)

    val countDS: DataStream[(String, Int)] = keyByDS.process(
      new KeyedProcessFunction[String, (String, Int), (String, Int)] {

        // Keyed reducing state holding the running count for the current key.
        var reducingState: ReducingState[Int] = _

        override def open(parameters: Configuration): Unit = {
          val context: RuntimeContext = getRuntimeContext

          // Descriptor: state name, the aggregation function (sum), and the
          // state's value type.
          val reduceStateDesc = new ReducingStateDescriptor[Int](
            "count",
            new ReduceFunction[Int] {
              override def reduce(x: Int, y: Int): Int = x + y
            },
            classOf[Int]
          )

          // Obtain the reducing-state handle from the runtime context.
          reducingState = context.getReducingState(reduceStateDesc)
        }

        override def processElement(
                                     value: (String, Int),
                                     ctx: KeyedProcessFunction[String, (String, Int), (String, Int)]#Context,
                                     out: Collector[(String, Int)]): Unit = {

          // Fold the element's own count into the state; the state's
          // ReduceFunction aggregates automatically on add().
          // Was hard-coded add(1), which only worked because the upstream
          // map emits a constant 1 — using value._2 is correct in general.
          reducingState.add(value._2)

          // Read the aggregated count for the current key.
          val count: Int = reducingState.get()

          // Emit the word together with its updated running count.
          out.collect((value._1, count))
        }
      }
    )

    countDS.print()

    env.execute()
  }
}
