package cn.ipanel.bigdata.job.realtime

import cn.ipanel.bigdata.boot.Job
import cn.ipanel.bigdata.boot.config.{ConfigKey, Configuration}
import cn.ipanel.bigdata.boot.date.{Date, Minute}
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.dw.realtime.{T_DEVICE_STATUS => T}
import cn.ipanel.bigdata.dw.ods.T_HDFS_LOG._
import cn.ipanel.bigdata.utils.Dictionary.Service
import cn.ipanel.bigdata.utils.Util.mapToI
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Real-time statistics of device online/offline status.
 *
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_shanxi
 * @date 2023/03/28 16:52
 */
class Realtime1 extends Job {

  // Current batch minute; not assigned in this file — presumably set by the Job framework. TODO confirm.
  var now: Minute = _

  /**
   * Starts a Spark Streaming job that consumes device log lines from Kafka and,
   * on every micro-batch, persists the latest online/offline status per device.
   */
  override def onStartup(): Unit = {
    // Batch interval and Kafka connection settings come from external configuration.
    val SECONDS = Configuration.getParam(ConfigKey.REALTIME_1_INTERVAL_SECONDS, "60").toInt
    val BOOTSTRAP_SERVERS = Configuration.getParam(ConfigKey.REALTIME_BOOTSTRAP_SERVERS)
    val GROUP_ID = Configuration.getParam(ConfigKey.REALTIME_1_GROUP_ID)
    val TOPIC = Configuration.getParam(ConfigKey.REALTIME_TOPICS)
    val MASTER = Configuration.spark.master
    val JOB_NAME = this.getClass.getSimpleName

    Logger.I("seconds: " + SECONDS)
    Logger.I("bootstrapServers: " + BOOTSTRAP_SERVERS)
    Logger.I("groupId: " + GROUP_ID)
    Logger.I("topic: " + TOPIC)
    Logger.I("master: " + MASTER)
    Logger.I("job_name: " + JOB_NAME)

    // Initialize SparkConf and the streaming context with the configured batch interval.
    val conf = new SparkConf().setMaster(MASTER).setAppName(JOB_NAME)
    val ssc = new StreamingContext(conf, Seconds(SECONDS))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> BOOTSTRAP_SERVERS,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      // NOTE: 1. As long as group.id is unchanged, each restart resumes from the last committed
      //          offset (offsets are auto-committed), regardless of the "auto.offset.reset" setting.
      //       2. A new group.id starts from the latest offset by default (equivalent to
      //          ("auto.offset.reset" -> "latest")); to consume from the earliest offset instead,
      //          ("auto.offset.reset" -> "earliest") must be set explicitly.
      // This differs from resetting offsets with the command:
      //   kafka-consumer-groups --bootstrap-server master:9092 --execute --reset-offsets --to-latest --topic lzz-test --group lzz-group
      // which rewinds/advances the committed offset so subsequent consumption re-reads from there.
      "group.id" -> GROUP_ID,
      "enable.auto.commit" -> (true: java.lang.Boolean)
    )
    // TOPIC is a comma-separated list; split() already yields the Array Subscribe needs.
    val topics = TOPIC.split(",")
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    import spark.implicits._
    stream.foreachRDD(rdd => {
      val dataDf = rdd
        .map(x => x.value())
        .filter(x => Table.lineIsValid(x))
        .map(Table(_))
        // all valid raw records of this batch
        .toDF()
        .withColumn(F_SERVICE, col(F_SERVICE).cast(IntegerType))
        // keep only online/offline status events
        .filter(col(F_SERVICE).isin(Service.SERVICE_ONLINE, Service.SERVICE_OFFLINE))

      dataDf.persist()
      // count() materializes the cache; reuse the value instead of triggering a
      // second Spark action via isEmpty.
      val recordCount = dataDf.count()
      Logger.I(s"latest ${SECONDS}s record count: " + recordCount)
      if (recordCount > 0) {
        saveOnlineOfflineDevice(dataDf)
      }

      dataDf.unpersist()
    })
    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Persists the most recent online/offline status per device.
   *
   * For each device id, only the row with the latest timestamp is kept
   * (ranked per device, newest first), then mapped to the target schema
   * (device id, status, update time) and saved via [[T.save]].
   *
   * @param df batch DataFrame containing at least device id, service code and timestamp columns
   */
  def saveOnlineOfflineDevice(df: DataFrame): Unit = {
    T.save(
      // Defensive re-filter: the caller already restricts to online/offline events,
      // but this keeps the method safe if invoked with unfiltered data.
      df.filter(col(F_SERVICE).isin(Service.SERVICE_ONLINE, Service.SERVICE_OFFLINE))
        // the latest event per device wins
        .withColumn("rank_num", row_number() over Window.partitionBy(F_DEVICE_ID).orderBy(col(F_TIMESTAMP).desc))
        .filter(col("rank_num") === lit(1))
        .withColumnRenamed(F_DEVICE_ID, T.F_DEVICE_ID)
        .withColumnRenamed(F_TIMESTAMP, T.F_UPDATE_TIME)
        // map the service code to the target table's status constants
        .withColumn(T.F_STATUS, when(col(F_SERVICE) === lit(Service.SERVICE_ONLINE), lit(T.F_STATUS_ONLINE))
                                  .otherwise(lit(T.F_STATUS_OFFLINE)))
        .select(T.F_DEVICE_ID, T.F_STATUS, T.F_UPDATE_TIME)
    )
  }

}