package data

import java.util.concurrent.TimeUnit

import org.apache.flink.api.common.restartstrategy.RestartStrategies
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.scala._
//import org.apache.flink.contrib.streaming.state.RocksDBStateBackend
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

object CheckpointApp {

  /**
   * Flink streaming word-count demo with checkpointing and restart enabled.
   *
   * Reads comma-separated text from a socket, lower-cases it, splits it into
   * words and prints a running per-word count. Any input line containing
   * "pk" deliberately throws a RuntimeException so that the restart strategy
   * and checkpoint recovery can be observed. Checkpoints are written every
   * 5 seconds to an FsStateBackend and retained on cancellation, so the job
   * can later be resumed manually (flink run -s &lt;checkpoint-path&gt;).
   *
   * @param args optional overrides: args(0) = source host (default "localhost"),
   *             args(1) = source port (default 9527). Omitting both keeps the
   *             original hard-coded behavior.
   */
  def main(args: Array[String]): Unit = {
    // Generalized: host/port may come from the command line, falling back to
    // the previous hard-coded values so existing invocations are unchanged.
    val host = if (args.length > 0) args(0) else "localhost"
    val port = if (args.length > 1) args(1).toInt else 9527

    System.setProperty("HADOOP_USER_NAME", "hadoop")
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.enableCheckpointing(5000) // checkpoint interval: 5 seconds
    env.setParallelism(1)
    // Keep externalized checkpoints after the job is cancelled so the state
    // survives and the job can be restarted from it manually.
    env.getCheckpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
    val stateBackend = new FsStateBackend("hdfs://hadoop000:8020/ruozedata-flink/state2")
    //val stateBackend = new FsStateBackend("file:///Users/andy/Documents/data/state")
    //val stateBackend = new RocksDBStateBackend("hdfs://ruozedata001:8020/ruozedata-flink/rocksdb-state",true)

    //stateBackend.setDbStoragePath("/home/hadoop/tmp/flink/rocksdb")  // defaults to java.io.tmpdir when unset
    env.setStateBackend(stateBackend)

    // Restart at most twice with a 5-second delay between attempts; after
    // that the job fails permanently.
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
      2, // number of restart attempts
      Time.of(5, TimeUnit.SECONDS) // delay between attempts
    ))

    env.socketTextStream(host, port)
      .map { line =>
        // Deliberate failure hook: a line containing "pk" kills the task so
        // the restart strategy / checkpoint recovery can be demonstrated.
        if (line.contains("pk")) {
          throw new RuntimeException("PK哥来了，快跑...")
        } else {
          line.toLowerCase()
        }
      }
      .flatMap(_.split(","))
      .map((_, 1))
      .keyBy(_._1)
      .sum(1)
      .print()

    env.execute(getClass.getCanonicalName)
  }

}
