package data.source

import java.util.Properties
import java.util.concurrent.TimeUnit

import org.apache.flink.api.common.restartstrategy.RestartStrategies
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.time.Time
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.api.scala._
import org.apache.flink.runtime.state.StateBackend
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}

object FlinkKafkaStateApp {

    /**
     * Word-count job reading from Kafka with checkpointing / state-backend
     * configuration, plus a secondary socket stream used to trigger an
     * artificial failure so restart-from-checkpoint behavior can be observed.
     */
    def main(args: Array[String]): Unit = {
        val env = StreamExecutionEnvironment.getExecutionEnvironment
        env.setParallelism(1)

        // --- fault-tolerance setup ---
        env.enableCheckpointing(5000)
        env.setStateBackend(new FsStateBackend("file:///Users/andy/Documents/data/state"))
        // Keep externalized checkpoints after a manual cancel so the job can be resumed from them.
        env.getCheckpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.of(5, TimeUnit.SECONDS)))
//env.getConfig.getGlobalJobParameters

        // --- Kafka consumer configuration ---
        val kafkaProps = new Properties()
        kafkaProps.setProperty("bootstrap.servers", "hadoop000:9092")
        kafkaProps.setProperty("group.id", "flink-kafka-111")
        kafkaProps.setProperty("auto.offset.reset", "earliest")
        kafkaProps.setProperty("enable.auto.commit", "false")

        val consumer = new FlinkKafkaConsumer[String]("flink9876", new SimpleStringSchema(), kafkaProps)
        // default=true; when false, offsets live only in the state backend and are NOT
        // committed back to the special Kafka topic (__consumer_offsets)
        //consumer.setCommitOffsetsOnCheckpoints(false)  // recommended to leave enabled

        /**
         * 1) without a savepoint: consumption resumes from the offsets in the special topic
         * 2) with a savepoint: consumption resumes from the metadata at the savepoint path
         */
        env.addSource(consumer)
            .flatMap(_.split(","))
            .map(word => (word, 1))
            .keyBy(_._1)
            .sum(1)
            .print()

        // Socket stream used to inject a failure on demand: any line containing "pk" crashes the job.
        env.socketTextStream("localhost", 9527)
            .map { line =>
                if (line.contains("pk")) throw new RuntimeException("PK哥来了，快跑...")
                else line.toLowerCase()
            }
            .print()

        env.execute("若泽数据Flink...")
    }
}
