package com.haimalab.demo.kafka

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala._
import org.apache.flink.runtime.state.StateBackend
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010

/**
 * Flink streaming job that consumes string records from a Kafka topic,
 * with exactly-once checkpointing persisted to a filesystem state backend,
 * and prints the records to stdout.
 */
object KafkaSourceJob {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Enable checkpointing with a 1s interval between state checkpoints.
    env.enableCheckpointing(1000)
    // Additional optional checkpoint configuration:
    val config = env.getCheckpointConfig
    // Checkpointing semantics: exactly-once.
    config.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
    // Minimum pause between the end of one checkpoint and the start of the next.
    config.setMinPauseBetweenCheckpoints(500)
    // Abort a checkpoint attempt if it takes longer than 60s.
    config.setCheckpointTimeout(60000)
    // Maximum number of checkpoint attempts that may be in flight concurrently.
    config.setMaxConcurrentCheckpoints(3)
    // Retain externalized checkpoints even when the job is cancelled,
    // so the job can be restored from them manually.
    config.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
    // Prefer recovering from the latest checkpoint even if a more recent savepoint exists.
    config.setPreferCheckpointForRecovery(true)

    // NOTE(review): Windows-local path — only valid on a single machine; use a
    // shared filesystem (e.g. HDFS/S3) URI when running on a real cluster.
    val stateBackend: StateBackend = new FsStateBackend("file:///E:/tmp/checkpoint/")
    env.setStateBackend(stateBackend)

    val stream: DataStream[String] = getInput(env).setParallelism(2)

    stream.print()
    env.execute()
  }

  /**
   * Builds the Kafka source stream.
   *
   * The consumer starts from the earliest available offsets; offset commits to
   * Kafka are disabled because the checkpointing mechanism is the source of truth.
   *
   * @param env the streaming execution environment to attach the source to
   * @return a DataStream of raw string records from the "ngy-test" topic
   */
  private def getInput(env: StreamExecutionEnvironment): DataStream[String] = {
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "172.16.208.113:6667")
    // Fresh group id per run so the job always re-reads independently of prior runs.
    properties.setProperty("group.id", "test" + System.currentTimeMillis())
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    // FIX: the correct Kafka config key is "enable.auto.commit" (the original
    // "auto.enable.commit" was silently ignored, leaving auto-commit enabled).
    properties.setProperty("enable.auto.commit", "false")

    val topic = "ngy-test"
    val myConsumer = new FlinkKafkaConsumer010[String](topic, new SimpleStringSchema(), properties)
    myConsumer.setStartFromEarliest() // start from the earliest record possible
    //    myConsumer.setStartFromLatest() // start from the latest record
    //    myConsumer.setStartFromTimestamp(System.currentTimeMillis()) // start from specified epoch timestamp (milliseconds)
    //    myConsumer.setStartFromGroupOffsets() // the default behaviour

    env.addSource(myConsumer)
  }
}
