package com.baishancloud.log.directories

import com.baishancloud.log.directories.StreamingDirectoriesAggregate.{LogRecord, aggInterval, levelOneDomains}
import com.baishancloud.log.format.scala.Origin
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.util.Collector
import org.slf4j.{Logger, LoggerFactory}

import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, ZoneOffset}
import java.util.{Timer, TimerTask}
import scala.collection.mutable

/**
 *
 * 转化日志数据为LogRecord对象，定时获取指定账户下最新域名及域名和id信息，并根据结果更新数据
 *
 * @author ziqiang.wang
 * @date 2021-11-07 15:06
 * */
class LogRecordParse(parameterTool: ParameterTool) extends ProcessFunction[Origin, LogRecord] {

  val LOGGER: Logger = LoggerFactory.getLogger(classOf[LogRecordParse])

  // domain name -> domain id table; loaded in open() and refreshed by a daemon timer
  val domainIds: mutable.Map[String, Int] = mutable.Map[String, Int]()
  var subTaskIndex: Int = -1

  // DateTimeFormatter is immutable and thread-safe: build it once instead of per element.
  private val timeFormat: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

  // Refresh period for the domain-id table: 30 minutes, in milliseconds.
  private val RefreshPeriodMs: Long = 1800 * 1000L

  /**
   * Loads the initial domain-id table and schedules a periodic refresh.
   * The timer is a daemon thread so it does not block task shutdown.
   */
  override def open(parameters: Configuration): Unit = {
    subTaskIndex = getRuntimeContext.getIndexOfThisSubtask
    // Initial load before any element is processed.
    FetchLatestDomainId.core(domainIds)
    LOGGER.info(s"LogRecordParse${subTaskIndex}初始化完成，domainIds内容为：\n${domainIds}")
    val timer = new Timer("update DomainIds", true)
    timer.scheduleAtFixedRate(new TimerTask {
      override def run(): Unit = FetchLatestDomainId.core(domainIds)
    }, RefreshPeriodMs, RefreshPeriodMs)
  }


  /**
   * Floors a log timestamp to the start of its aggregation window,
   * e.g. with a 5-minute interval: 10:11:11 -> "... 10:10:00".
   *
   * @param seconds     log time as epoch seconds
   * @param aggInterval aggregation interval in seconds
   * @return window start time formatted as "yyyy-MM-dd HH:mm:ss" (fixed UTC+8 offset)
   */
  private def timeAtCal(seconds: Long, aggInterval: Long): String =
    LocalDateTime
      .ofEpochSecond(seconds - (seconds % aggInterval), 0, ZoneOffset.ofHours(8))
      .format(timeFormat)

  /**
   * Extracts the log directory from the URL path segments.
   * Domains listed in [[levelOneDomains]] use the first-level directory (segment 3);
   * all other domains use the second-level directory (segment 4). Either segment is
   * truncated to at most 63 characters; returns "" when the path is not deep enough.
   *
   * NOTE(review): segment indexes 3/4 presumably assume the URL starts with
   * "scheme://host/..." so that split("/") yields ["scheme:", "", "host", seg1, seg2, ...]
   * — confirm against Origin.urlWithoutQuery().
   *
   * @param domain the log's domain name
   * @param splits result of urlWithoutQuery().split("/")
   * @return "/" + truncated directory segment, or "" when absent
   */
  private def directoryOf(domain: String, splits: Array[String]): String = {
    val idx = if (levelOneDomains.contains(domain)) 3 else 4
    if (splits.length > idx) {
      // BUGFIX: the second-level branch previously truncated with splits(3).length
      // instead of splits(4).length, which could throw StringIndexOutOfBoundsException
      // whenever segment 3 was longer than segment 4.
      "/" + splits(idx).substring(0, math.min(splits(idx).length, 63))
    } else {
      ""
    }
  }

  /**
   * Converts a raw log line into a [[LogRecord]] and emits it.
   * Logs whose domain is absent from the domain-id table are silently dropped.
   */
  override def processElement(value: Origin, ctx: ProcessFunction[Origin, LogRecord]#Context, out: Collector[LogRecord]): Unit = {
    // Only process domains present in the required domain-id table.
    if (domainIds.contains(value.domain())) {
      val splits: Array[String] = value.urlWithoutQuery().split("/")
      val logRecord: LogRecord = LogRecord(
        // log time floored to the aggregation window start (interval defaults to 300s)
        timeAtCal(value.datetimeOfUTC().toEpochSecond, parameterTool.getLong(aggInterval, 300)),
        domainIds(value.domain()), // id mapped to the domain
        if (value.isParentLog) 1 else 0, // parent-log flag
        directoryOf(value.domain(), splits), // log directory
        1,
        value.bytesSent()
      )
      out.collect(logRecord)
    }
  }
}
