package org.apache.flink.state

import java.util
import java.util.Collections

import org.apache.flink.api.common.functions.{ReduceFunction, RichFlatMapFunction}
import org.apache.flink.api.common.state.{ListState, ListStateDescriptor, MapState, MapStateDescriptor, ReducingState, ReducingStateDescriptor, ValueState, ValueStateDescriptor}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.api.scala._
import org.apache.flink.configuration.Configuration
import org.apache.flink.shaded.guava18.com.google.common.collect.Lists
import org.apache.flink.util.Collector

import scala.collection.JavaConversions._

/**
 * Example: per-key running sum.
 *
 * Demonstrates Flink keyed `ReducingState` by accumulating the second tuple
 * field for each key and emitting (key, runningSum) on every input element.
 **/
object KeyedStateApp2 {

  /**
   * Entry point: builds a bounded keyed stream of (key, value) pairs and uses
   * a keyed [[ReducingState]] to maintain a per-key running sum, emitting
   * (key, runningSumSoFar) for every incoming element.
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.fromCollection(List(
      (1L, 3L),
      (1L, 7L),
      (2L, 4L),
      (1L, 5L),
      (2L, 2L),
      (2L, 5L)
    )).keyBy(_._1) // key by the first tuple field; the state below is scoped per key
        .flatMap(new RichFlatMapFunction[(Long, Long), (Long, Long)] {

          // Per-key running sum, managed by Flink's keyed state backend.
          private var reducingState: ReducingState[Long] = _

          override def open(parameters: Configuration): Unit = {
            // The ReduceFunction folds every value passed to add() into the
            // single stored aggregate for the current key.
            reducingState = getRuntimeContext.getReducingState(
              new ReducingStateDescriptor[Long]("sum",
                new ReduceFunction[Long] {
                  override def reduce(value1: Long, value2: Long): Long = value1 + value2
                }, classOf[Long])
            )
          }

          override def flatMap(value: (Long, Long), out: Collector[(Long, Long)]): Unit = {
            reducingState.add(value._2)
            // Fix: pass an explicit Tuple2. The original relied on deprecated
            // Scala 2 auto-tupling of two arguments into the single tuple
            // parameter of collect(), which warns under -Xlint and is gone in Scala 3.
            out.collect((value._1, reducingState.get()))
          }
        })
        .print()

    env.execute(getClass.getCanonicalName)
  }
}

