//package org.apache.flink.state
//
//import org.apache.flink.api.common.state.{ListState, ListStateDescriptor}
//import org.apache.flink.api.scala._
//import org.apache.flink.runtime.state.{FunctionInitializationContext, FunctionSnapshotContext}
//import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction
//import org.apache.flink.streaming.api.functions.sink.SinkFunction
//import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
//
//import scala.collection.mutable.ListBuffer
//import scala.collection.JavaConverters._
//object OperatorStateApp {
//  def main(args: Array[String]): Unit = {
//    val env = StreamExecutionEnvironment.getExecutionEnvironment
//    env.setParallelism(1)
//    env.fromElements(
//      ("PK",31),
//      ("若泽",30),
//      ("星星",18),
//      ("J总",16),
//    ).addSink(new RuozedataSink)
//
//    env.execute(getClass.getCanonicalName)
//  }
//}
//
//class RuozedataSink(threshold:Int = 3) extends SinkFunction[(String,Int)] with CheckpointedFunction {
//  val buffer:ListBuffer[(String,Int)] = new ListBuffer[(String, Int)]()
//
//  var checkpointState:ListState[(String,Int)] = _
//
//  override def invoke(value: (String, Int), context: SinkFunction.Context[_]): Unit = {
//    buffer += value
//
//    if(threshold == buffer.size) {
//      println("自定义输出:" + buffer)
//      buffer.clear()
//    }
//  }
//
//  override def snapshotState(context: FunctionSnapshotContext): Unit = {
//    checkpointState.clear()
//    for(ele <- buffer) {
//      checkpointState.add(ele)
//    }
//  }
//
//  override def initializeState(context: FunctionInitializationContext): Unit = {
//    val descriptor = new ListStateDescriptor[(String, Int)]("list", createTypeInformation[(String, Int)])
//
//    checkpointState = context.getOperatorStateStore.getListState(descriptor)
//
//    if(context.isRestored) {
//      for(ele <- checkpointState.get().asScala) {
//        buffer += ele
//      }
//    }
//  }
//}
//
