package com.shujia.flink.state

import org.apache.flink.runtime.state.hashmap.HashMapStateBackend
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala._

/**
 * Flink checkpointing demo: a stateful socket word count whose operator state
 * (the running counts inside `sum`) is periodically snapshotted to HDFS so the
 * job can be restored after a failure or cancellation.
 */
object Demo2Checkpoint {
  def main(args: Array[String]): Unit = {

    // Obtain the streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Enable checkpointing: snapshot operator state every 2000 ms.
    env.enableCheckpointing(2000)

    // Retain externalized checkpoints even after the job is cancelled,
    // so state can still be restored manually later.
    env.getCheckpointConfig.setExternalizedCheckpointCleanup(
      ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)

    // Durable checkpoint storage location on HDFS.
    env.getCheckpointConfig.setCheckpointStorage("hdfs://master:9000/flink/checkpoint")

    // State backend: HashMapStateBackend keeps working state on the JVM heap;
    // checkpoints are written out to the storage configured above.
    env.setStateBackend(new HashMapStateBackend())

    // Word count over a socket text source.
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // Split each comma-separated line into words and pair each word with 1.
    val pairs: DataStream[(String, Int)] = lines
      .flatMap(line => line.split(","))
      .map((_, 1))

    // Partition by word; `sum` is a stateful operator (backed by a ValueState
    // per key) that maintains the running count.
    val keyedPairs: KeyedStream[(String, Int), String] = pairs.keyBy(_._1)
    val counts: DataStream[(String, Int)] = keyedPairs.sum(1)

    counts.print()

    // Launch the Flink job.
    env.execute()
  }
}
