package cn.tedu.stream.checkpoint

import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

object StreamCheckPointDemo {

  /**
   * Streaming word-count job demonstrating Flink checkpoint configuration.
   *
   * Reads lines from a socket source (hadoop01:9999), splits them into words,
   * and emits running per-word counts, with exactly-once checkpointing persisted
   * to a filesystem state backend.
   *
   * @param args args(0) must be the state-backend URI, e.g.
   *             "hdfs://hadoop01:8020/ck/001" or "file:///tmp/ck/001".
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when no backend URI is supplied.
    require(args.nonEmpty, "Usage: StreamCheckPointDemo <state-backend-uri>")

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Enable checkpointing: trigger a snapshot every 5000 ms.
    env.enableCheckpointing(5000)
    // Exactly-once state consistency for the checkpoint snapshots.
    env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
    // Allow at most one checkpoint to be in flight at a time.
    env.getCheckpointConfig.setMaxConcurrentCheckpoints(1)

    // Persist checkpoint state to the filesystem URI passed on the command
    // line (HDFS or local "file://" paths both work with FsStateBackend).
    env.setStateBackend(new FsStateBackend(args(0)))

    // Retain externalized checkpoints when the job is cancelled, so the job
    // can later be restored from the retained checkpoint data.
    env.getCheckpointConfig.enableExternalizedCheckpoints(
      ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)

    // Source: raw text lines from a socket, e.g. "hello flink  hello world".
    val source = env.socketTextStream("hadoop01", 9999)

    import org.apache.flink.api.scala._
    // Classic word count. Split on runs of whitespace and drop empty tokens:
    // split(" ") would emit empty strings for consecutive spaces (as in the
    // sample input above) and count them as words.
    val result: DataStream[(String, Int)] =
      source
        .flatMap(_.split("\\s+"))
        .filter(_.nonEmpty)
        .map((_, 1))
        .keyBy(0)  // key by the word (tuple field 0)
        .sum(1)    // running sum of the count (tuple field 1)

    result.print()
    // Name the job so it is identifiable in the Flink dashboard.
    env.execute("StreamCheckPointDemo")
  }

}
