package cn.ipanel.bigdata.dw.ods

import cn.ipanel.bigdata.boot.date.{Date, Day, Hour, Time}
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.boot.source.genre.Hdfs
import cn.ipanel.bigdata.dw.Const
import cn.ipanel.bigdata.utils.Util.{arrToL, arrToS, nonEmpty}
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.sql.types.{IntegerType, LongType, MapType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row}

import scala.collection.mutable

protected[ods] object HdfsLog extends Hdfs("stb") with Const {

  final val F_DATE_TIME     : String = "f_date_time"        // F_TIMESTAMP rendered as yyyy-MM-dd HH:mm:ss
  final val F_SERVICE       : String = "f_service"          // report (service) type
  final val F_TIMESTAMP     : String = "f_timestamp"        // raw reported timestamp
  final val F_DEVICE_ID     : String = "f_device_id"        // device id, unique per device
  final val F_EXTRA         : String = "f_extra"            // extension key/value parameters
  final val F_DATE          : String = "f_date"             // date (int form, derived from the timestamp)


  /**
   * Reads one full day of terminal logs and parses each valid line into a [[Table]] row.
   *
   * @param day day to read; defaults to yesterday
   * @return parsed DataFrame; on any read/parse failure the error is logged and an
   *         empty DataFrame with [[Table.SCHEMA]] is returned instead of throwing
   */
  def readToTable(day: Day = Date.asYesterday): DataFrame = {
    import spark.implicits._
    try {
      toRdd(s"${day.toDate}")
        .filter(Table.lineIsValid)
        .map(Table(_))
        .toDF()
    } catch {
      case e: Exception =>
        Logger.E(
          s""" read day[${day.toDate}] terminal log failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        // Fall back to an empty frame that still carries the expected schema,
        // so downstream column references keep working.
        spark.createDataFrame(spark.sparkContext.emptyRDD[Row], Table.SCHEMA)
    }
  }


  /**
   * Hourly variant of [[readToTable]].
   *
   * @param hour hour to read; defaults to the previous hour
   * @return parsed DataFrame; on failure an empty DataFrame with [[Table.SCHEMA]]
   */
  def readToTableByHour(hour: Hour = Date.asHour.prev()): DataFrame = {
    import spark.implicits._
    try {
      toRddHours(hour)
        .filter(Table.lineIsValid)
        .map(Table(_))
        .toDF()
    } catch {
      case e: Exception =>
        Logger.E(
          s""" read hour[${hour.toFullHour}] terminal log failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        spark.createDataFrame(spark.sparkContext.emptyRDD[Row], Table.SCHEMA)
    }
  }


  /**
   * One parsed terminal-log record. Field names intentionally mirror the
   * `F_*` column constants above. Fields are `var` for backward compatibility
   * with existing callers; do not rely on mutation in new code.
   */
  case class Table(var f_date_time: String,
                   var f_service: String,
                   var f_timestamp: Long,
                   var f_device_id: String,
                   var f_extra: Map[String, String],
                   var f_date: Int)

  object Table {

    lazy final val NULLABLE: Boolean = false
    /** Spark schema matching the [[Table]] case class, field for field. */
    lazy final val SCHEMA = {
      StructType(
        Seq(StructField(F_DATE_TIME, StringType, NULLABLE),
          StructField(F_SERVICE, StringType, NULLABLE),
          StructField(F_TIMESTAMP, LongType, NULLABLE),
          StructField(F_DEVICE_ID, StringType, NULLABLE),
          StructField(F_EXTRA, MapType(StringType, StringType), NULLABLE),
          StructField(F_DATE, IntegerType, NULLABLE)))
    }

    // A valid line starts with <[
    private[HdfsLog] final val VALID_REGEX = "(?<=<\\[).*?".r
    // Basic fields: the comma-separated list between <[ and ]>
    private[HdfsLog] final val BASICS_REGEX = "(?<=<\\[).*?(?=]>)".r
    private[HdfsLog] final val BASICS_SPLIT_SEP = ','
    private[HdfsLog] final val BASICS_EMPTY = Array("")
    // Extension pairs: <|><(k,v)> or <&><(k,v)>; group 2 = key, group 4 = value
    private[HdfsLog] final val EXTEND_REGEX = "<([|&])><\\((.*?)(,)(.*?)\\)>".r
    private[HdfsLog] final val EXTEND_KEY_IDX = 2
    private[HdfsLog] final val EXTEND_VAL_IDX = 4

    private[HdfsLog] final val INDEX_SERVICE = 0
    private[HdfsLog] final val INDEX_TIMESTAMP = 1
    private[HdfsLog] final val INDEX_DEVICE_ID = 2

    // Digits-only device id pattern. Not referenced within this object;
    // NOTE(review): appears unused here — confirm before removing.
    private[HdfsLog] final val DEVICE_REGEX = """^\d+$""".r

    /** A line is parseable iff it contains the `<[` basics marker. */
    def lineIsValid(line: String): Boolean = VALID_REGEX.findFirstIn(line).nonEmpty

    /**
     * Extracts the six [[Table]] fields from a raw log line.
     *
     * Log format: `<[serviceid,eventtime,deviceid]><|><(key-1,Value-1)><&>…<&><(key-n,Value-n)>`
     *
     * Never returns `None`: on parse failure the error is logged and a sentinel
     * tuple (`NONE` strings, `-1` numerics, empty map) is returned so the row
     * count is preserved downstream.
     */
    def unapply(line: String): Option[(String, String, Long, String, Map[String, String], Int)] = {

      val basics = BASICS_REGEX.findFirstIn(line) match {
        case Some(found) => found.split(BASICS_SPLIT_SEP)
        case None        => BASICS_EMPTY
      }
      // Collect all extension pairs; with duplicate keys the last occurrence wins
      // (same semantics as the previous mutable-HashMap accumulation).
      val extend: Map[String, String] =
        EXTEND_REGEX.findAllMatchIn(line)
          .map(m => m.group(EXTEND_KEY_IDX) -> m.group(EXTEND_VAL_IDX))
          .toMap

      try {
        val timestamp: Long = arrToL(basics, INDEX_TIMESTAMP, INTX)
        val time: Time = Time(timestamp)
        Some((time.toYYYYMMddHHmmss,                                          // f_date_time
          arrToS(basics, INDEX_SERVICE, NONE),                                // f_service
          timestamp,                                                          // f_timestamp
          arrToS(basics, INDEX_DEVICE_ID, NONE),                              // f_device_id
          extend,                                                             // f_extra
          time.toDate))                                                       // f_date
      } catch {
        case e: Exception =>
          Logger.E(
            s""" Unapply Terminal Log[$line] Failed.
               | Because: ${ExceptionUtils.getStackTrace(e)}
               |""".stripMargin)
          Some(NONE, NONE, -1, NONE, Map(), -1)
      }
    }


    /** Parses a raw line into a [[Table]] via [[unapply]]. */
    def apply(line: String): Table = line match {
      case Table(p1, p2, p3, p4, p5, p6) => Table(p1, p2, p3, p4, p5, p6)
      case _ => null // unreachable: unapply always returns Some; kept for exhaustiveness
    }
  }

  /** Ad-hoc smoke test for the line parser. */
  def main(args: Array[String]): Unit = {
    val s = "<[0701,1677987757941,001122334455]><|><(C,0.25)><&><(M,0.33)>"
    val table = Table(s)
    println("valid: " + Table.lineIsValid(s))
    println(table.f_date_time)
    println(table.f_service)
    println(table.f_device_id)
    println(table.f_extra)
    println(table.f_date)
  }

}
