package com.shujia.flink.state

import org.apache.flink.api.common.functions.{RichMapFunction, RuntimeContext}
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala._

object Demo2ValueState {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: raw text lines from a socket on host "master", port 8888.
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // Split each comma-separated line into words and pair every word with 1.
    val wordPairs: DataStream[(String, Int)] = lines
      .flatMap(line => line.split(","))
      .map(word => (word, 1))

    // Partition the stream by the word so state is scoped per key.
    val keyed: KeyedStream[(String, Int), String] = wordPairs.keyBy(_._1)

    // Maintain a running count per word using keyed ValueState.
    val counts: DataStream[(String, Int)] = keyed.map(new RichMapFunction[(String, Int), (String, Int)] {

      /**
        * ValueState: single-value keyed state — Flink keeps one value per key.
        * With checkpointing, the state survives a job failure/restart.
        * NOTE(review): checkpoints cannot run purely locally; the job must be
        * submitted to a cluster and state persisted to HDFS — per the original
        * author's comment, confirm against the deployment setup.
        */
      private var countState: ValueState[Int] = _

      /**
        * Runs once before the first map() call. Typically used to establish
        * connections; here it initializes the keyed state handle.
        */
      override def open(parameters: Configuration): Unit = {
        // The runtime context is the gateway for creating state handles.
        val ctx: RuntimeContext = getRuntimeContext

        // Describe the state: a name plus the type of the stored value.
        val descriptor = new ValueStateDescriptor[Int]("count", classOf[Int])

        // Acquire the per-key state handle from the runtime.
        countState = ctx.getState(descriptor)
      }

      /**
        * Counts words via keyed state: read the previous count for the current
        * key, increment, write it back, and emit (word, newTotal).
        */
      override def map(value: (String, Int)): (String, Int) = {
        // Previous count for this key; on first access the state is empty
        // (Scala unboxes that to 0 for Int, so counting starts at 1).
        val previous: Int = countState.value()

        val updated: Int = previous + 1

        // Persist the new count back into keyed state.
        countState.update(updated)

        (value._1, updated)
      }
    })

    counts.print()

    env.execute()

  }

}
