package com.shujia.flink.state

import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

object Demo6ValueState {

  /**
   * Streaming word count backed by Flink keyed state.
   *
   * Reads comma-separated words from a socket, keys the stream by word,
   * and uses a per-key [[ValueState]] to maintain a running count that is
   * emitted downstream after every update.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: lines from the socket, split on commas into individual words.
    val words: DataStream[String] = env
      .socketTextStream("master", 8888)
      .flatMap(_.split(","))

    // Key by the word itself so that state below is scoped per distinct word.
    val keyedWords: KeyedStream[String, String] = words.keyBy(word => word)

    /**
     * Count occurrences of each word using ValueState.
     */
    val counts: DataStream[(String, Int)] = keyedWords.process(
      new KeyedProcessFunction[String, String, (String, Int)] {

        // ValueState holds exactly one value per key — here the running count
        // for the current word. Int state defaults to 0 for an unseen key.
        private var countState: ValueState[Int] = _

        // open() runs once when the task starts; the right place to wire up state.
        override def open(parameters: Configuration): Unit = {
          val runtime: RuntimeContext = getRuntimeContext
          val descriptor = new ValueStateDescriptor[Int]("count", classOf[Int])
          countState = runtime.getState(descriptor)
        }

        override def processElement(word: String,
                                    ctx: KeyedProcessFunction[String, String, (String, Int)]#Context,
                                    out: Collector[(String, Int)]): Unit = {
          // Read the previous count for this key, bump it, persist it back,
          // then emit the updated (word, count) pair downstream.
          val updated: Int = countState.value() + 1
          countState.update(updated)
          out.collect((word, updated))
        }
      })

    counts.print()

    env.execute()
  }

}
