package com.shujia.road

import java.lang
import java.text.SimpleDateFormat
import java.time.format.DateTimeFormatter
import java.time.{Instant, ZoneId}
import java.util.{Date, Properties}

import com.shujia.bean.Car
import com.shujia.tf.CarMapper
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.Configuration
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend
import org.apache.flink.runtime.state.StateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.environment.CheckpointConfig
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import redis.clients.jedis.Jedis

/**
  * Real-time per-road daily traffic flow.
  *
  * Reads car-passing records from the Kafka topic "cars", counts cars per
  * (road_id, day) using Flink keyed state, and writes each running total to
  * Redis under keys of the form "RealTimeRoadDayFlow:&lt;day&gt;:&lt;road&gt;".
  */
object RealTimeRoadDayFlow {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment


    // Start a checkpoint every 10 seconds
    env.enableCheckpointing(10000)

    // Advanced checkpoint options:

    // Exactly-once mode (this is the default)
    env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)

    // Leave at least 500 ms between the end of one checkpoint and the start of the next
    env.getCheckpointConfig.setMinPauseBetweenCheckpoints(500)

    // A checkpoint must complete within one minute or it is discarded
    env.getCheckpointConfig.setCheckpointTimeout(60000)

    // Allow only one checkpoint in flight at a time
    env.getCheckpointConfig.setMaxConcurrentCheckpoints(1)

    // Keep externalized checkpoints on HDFS even after the job is cancelled
    env.getCheckpointConfig.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)

    // Prefer checkpoints for recovery even when a more recent savepoint exists
    env.getCheckpointConfig.setPreferCheckpointForRecovery(true)

    // RocksDB state backend with incremental checkpoints (second arg = true) on HDFS
    val stateBackend: StateBackend = new RocksDBStateBackend("hdfs://master:9000/car/checkpoint", true)
    env.setStateBackend(stateBackend)


    // Kafka consumer properties for the car-passing records
    val properties = new Properties
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")
    // NOTE(review): placeholder-looking consumer group id — confirm before production use
    properties.setProperty("group.id", "asdasdasd")

    // Create the consumer for topic "cars" (plain UTF-8 strings)
    val flinkKafkaConsumer = new FlinkKafkaConsumer[String](
      "cars",
      new SimpleStringSchema,
      properties)

    // Read the topic from the beginning when there is no restored offset state
    flinkKafkaConsumer.setStartFromEarliest()


    val carsDS: DataStream[String] = env.addSource(flinkKafkaConsumer)

    // Parse each raw line into a Car object
    val carDS: DataStream[Car] = carsDS.map(new CarMapper)

    /**
      * 2.1 Real-time total daily flow for every road.
      */

    // Extract (road id, day string, 1) from every record. java.time replaces
    // the previous per-record `new SimpleDateFormat`/`new Date` allocations:
    // DateTimeFormatter.BASIC_ISO_DATE is an immutable, thread-safe constant
    // that formats as yyyyMMdd — identical output to the old pattern.
    val kvDS: DataStream[(Long, String, Long)] = carDS.map(car => {
      val road: Long = car.getRoad_id.toLong
      // assumes Car.getTime is epoch seconds (it was multiplied by 1000 before) — TODO confirm
      val time: Long = car.getTime.toLong

      val day: String = Instant
        .ofEpochSecond(time)
        .atZone(ZoneId.systemDefault())
        .toLocalDate
        .format(DateTimeFormatter.BASIC_ISO_DATE)

      (road, day, 1L)
    })

    // Key by (road, day) so each road gets an independent counter per day.
    // The key type is the Scala tuple (Long, String) — the previous
    // (java.lang.Long, String) annotation did not match keyBy's inferred type.
    val keyByDS: KeyedStream[(Long, String, Long), (Long, String)] = kvDS.keyBy(kv => (kv._1, kv._2))

    // Running sum over the third tuple field (the per-record 1L)
    val countDS: DataStream[(Long, String, Long)] = keyByDS.sum(2)

    // Sink: push every updated running total to Redis
    countDS.addSink(new RichSinkFunction[(Long, String, Long)] {

      // One Jedis connection per parallel sink subtask
      var jedis: Jedis = _

      override def open(parameters: Configuration): Unit = {
        // Create the Redis connection
        jedis = new Jedis("master", 6379)
      }

      override def close(): Unit = {
        jedis.close()
      }

      override def invoke(value: (Long, String, Long), context: SinkFunction.Context[_]): Unit = {
        val road: Long = value._1
        val day: String = value._2
        val num: Long = value._3

        val key = s"RealTimeRoadDayFlow:$day:$road"

        // Overwrite the latest running total for this (day, road)
        jedis.set(key, num.toString)

        // NOTE(review): a 1-hour TTL on a *daily* counter only survives because
        // every update resets it — the key vanishes an hour after the last car.
        // Confirm this expiry is intended.
        jedis.expire(key, 60 * 60)
      }
    })


    env.execute("RealTimeRoadDayFlow")


  }
}
