package com.shujia.flink.core

import org.apache.flink.api.common.functions.{RichFlatMapFunction, RuntimeContext}
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

object Demo4StateWC {

  /**
    * Stateful word count: reads comma-separated words from a socket,
    * keys by word, and keeps a running count per key via keyed ValueState.
    */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: raw lines from a socket on host "master", port 7777.
    val lines: DataStream[String] = env.socketTextStream("master", 7777)

    // Split each line into words, pair each word with an initial count of 1,
    // then partition the stream by the word itself.
    val keyedWords: KeyedStream[(String, Long), String] = lines
      .flatMap(_.split(","))
      .map((_, 1L))
      .keyBy(_._1)

    // Apply the stateful flatMap that accumulates a per-key running total.
    val counts: DataStream[(String, Long)] = keyedWords.flatMap(new WcflatMapFuncation)

    counts.print()

    env.execute()
  }
}

class WcflatMapFuncation extends RichFlatMapFunction[(String, Long), (String, Long)] {

  /**
    * Keyed running-count function. Because the upstream stream was keyed,
    * Flink scopes this ValueState to the current key: each word gets its
    * own independent counter, checkpointed and restored by Flink.
    */

  // Per-key running count. Initialized in open(); never access before open().
  var countState: ValueState[Long] = _

  override def open(parameters: Configuration): Unit = {
    // The runtime context gives access to Flink's runtime services
    // (keyed state, accumulators, task metadata, ...).
    val context: RuntimeContext = getRuntimeContext

    // Describe the state. NOTE(review): the name "average" is misleading
    // (it stores a count), but it is kept unchanged because the state name
    // is part of the savepoint contract — renaming breaks state restore.
    val valueDesc: ValueStateDescriptor[Long] =
      new ValueStateDescriptor[Long]("average", createTypeInformation[Long])

    // Register the state with the runtime and keep a handle to it.
    countState = context.getState(valueDesc)
  }

  /**
    * Called once per input record. Adds the record's count to the key's
    * running total and emits (word, newTotal).
    */
  override def flatMap(value: (String, Long), out: Collector[(String, Long)]): Unit = {

    // Previous total for the current key. For an unseen key the backend
    // holds no value; with Scala Long this unboxes to 0L — TODO confirm
    // this holds for the state backend in use.
    val lastCount: Long = countState.value()

    // Compute the new total once, persist it, and emit it — avoids a
    // redundant second state read after the update.
    val newCount: Long = lastCount + value._2
    countState.update(newCount)

    out.collect((value._1, newCount))
  }
}
