package com.baishancloud.log.directories

import cn.hutool.json.JSONUtil
import com.baishancloud.log.common.env.StreamEnv
import com.baishancloud.log.common.sink.SinkUtil
import com.baishancloud.log.common.source.SourceUtil
import com.baishancloud.log.format.scala.Origin
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.slf4j.{Logger, LoggerFactory}

import java.io.Serializable
import scala.beans.BeanProperty
import scala.collection.mutable

// Empty marker class: it exists only so the companion object can obtain a
// Logger via classOf[StreamingDirectoriesAggregate], giving log output the
// proper class name. It carries no state or behavior of its own.
class StreamingDirectoriesAggregate extends Serializable

/**
 * Flink streaming job that aggregates per-directory request counts and traffic
 * from two Kafka origin-log sources, buckets them in processing-time tumbling
 * windows, and sinks the aggregated rows as JSON strings into StarRocks.
 *
 * @author ziqiang.wang
 * @date 2021/11/08 17:46
 */
object StreamingDirectoriesAggregate extends Serializable {

  // Logger keyed to the empty marker class so log lines carry this class name.
  val LOGGER: Logger = LoggerFactory.getLogger(classOf[StreamingDirectoriesAggregate])


  // Parameter key: aggregation interval, default 5 minutes, in seconds.
  // NOTE(review): not read anywhere in this file — presumably consumed by
  // LogRecordParse (defined elsewhere); confirm before removing.
  val aggInterval: String = "aggInterval"
  // Parameter key: data output interval (= tumbling window size), default 1 minute,
  // in seconds. Read in main() as the window length.
  val dataOutInterval: String = "dataOutInterval"
  // Parameter key: domain-list refresh interval, default half an hour, in seconds.
  // NOTE(review): not read anywhere in this file — presumably consumed by
  // LogRecordParse for its periodic domain refresh; confirm.
  val domainUpdateInterval: String = "domainUpdateInterval"
  // Parameter key: StarRocks sink parallelism, default 1 (see setParallelism in main()).
  val sinkParallel: String = "sinkParallel"

  // Set of first-level domains. These are later merged with the account's domains and
  // processed together: for these domains traffic is aggregated by first-level
  // directory, for all other domains by second-level directory.
  val levelOneDomains: Set[String] = Set[String](
    "cdn.cnbj1.fds.api.mi-img.com",
    "cdn.cnbj0.fds.api.mi-img.com",
    "cdn.awsbj0.fds.api.mi-img.com",
    "cdn.fds-ssl.api.xiaomi.com",
    "bak-cdn.fds.api.xiaomi.com",
    "cdn-thumb.fds.api.xiaomi.com",
    "cdn.cnbj0e.fds.api.mi-img.com",
    "cdn.fds.api.xiaomi.com",
    "cnbj0.fds.api.mi-img.com",
    "img-fds02.qicyc.com")

  // One aggregated output row: grouping fields (time bucket, domain id, parent flag,
  // directory path) plus the request count and traffic measures. @BeanProperty
  // generates Java-style getters so JSONUtil.toJsonStr can serialize the record
  // into the JSON shape expected by the StarRocks sink.
  case class LogRecord(@BeanProperty time_at: String, @BeanProperty domain_id: Int, @BeanProperty is_parent: Int, @BeanProperty directories: String, @BeanProperty request: Long, @BeanProperty traffic: Long) extends Serializable {
    // Projection of the four grouping fields; used as the keyBy key in main().
    def getKey: LogKey = {
      LogKey(time_at, domain_id, is_parent, directories)
    }
  }

  object LogRecord extends Serializable {
    // Reassembles a full record from a grouping key plus aggregated measures
    // (inverse of getKey, with the window-summed request/traffic attached).
    def apply(key: LogKey, value: LogValue): LogRecord = {
      LogRecord(key.time_at, key.domain_id, key.is_parent, key.directories, value.request, value.traffic)
    }
  }

  // Grouping key for the windowed aggregation (all dimension fields of LogRecord).
  case class LogKey(time_at: String, domain_id: Int, is_parent: Int, directories: String) extends Serializable

  // Aggregated measures for one key within one window.
  case class LogValue(request: Long, traffic: Long) extends Serializable

  // Mutable domain-name -> domain-id cache. NOTE(review): not referenced in this
  // file — appears to be shared state maintained by LogRecordParse's periodic
  // domain refresh; confirm usage and thread-safety before changing.
  val domainIds: mutable.Map[String, Int] = mutable.Map[String, Int]()


  /**
   * Job entry point: builds and executes the Flink pipeline.
   *
   * Pipeline: two Kafka value-only sources are unioned, parsed into Origin
   * records (nulls dropped), converted to LogRecord by LogRecordParse, keyed by
   * the dimension fields, summed over processing-time tumbling windows, JSON
   * serialized, and written to StarRocks.
   *
   * NOTE: operator uid() values anchor checkpoint/savepoint state — do not
   * change them without a state-migration plan.
   *
   * @param args command-line arguments, parsed via ParameterTool
   */
  def main(args: Array[String]): Unit = {
    val parameterTool = ParameterTool.fromArgs(args)
    println("输入参数为：" + parameterTool.toMap.toString)
    StreamEnv.builder(parameterTool).build()
    val source1: DataStream[String] = SourceUtil.kafkaOnlyValue(parameterTool, "1").uid("1caac0f2-7a71-47df-bae7-3512dc84a62f")
    val source2: DataStream[String] = SourceUtil.kafkaOnlyValue(parameterTool, "2").uid("0ac5f36b-32f9-464d-91a2-adee4badfc3c")
    val source: DataStream[String] = source1.union(source2)
    source
      // Parse each raw line into an Origin record; unparseable lines become null…
      .map(Origin.parse(_).orNull).name("Origin").uid("ede6d7cf-44f7-4e3d-bb23-3bd06ecbce36")
      // …and are filtered out here.
      .filter(_ != null).name("!null").uid("291c780a-a284-4c8a-863a-b14a7b885126")
      // process: periodically fetches the latest domain/id info for the configured
      // accounts and emits the data we need as LogRecord objects.
      .process(new LogRecordParse(parameterTool)).name("LogRecordParse").uid("e7b1354a-dc99-4a69-9a1b-78599902d565")
      .keyBy(_.getKey)
      // Processing-time tumbling window; length comes from dataOutInterval (default 60 s).
      .window(TumblingProcessingTimeWindows.of(Time.seconds(parameterTool.getLong(dataOutInterval, 60))))
      .process(new ProcessWindowFunction[LogRecord, LogRecord, LogKey, TimeWindow]() {
        // Sums request and traffic over all records sharing this key in the window
        // and emits a single aggregated LogRecord.
        override def process(key: LogKey, context: Context, elements: Iterable[LogRecord], out: Collector[LogRecord]): Unit = {
          var requests: Long = 0
          var traffic: Long = 0
          elements.foreach(logRecord => {
            requests += logRecord.request
            traffic += logRecord.traffic
          })
          out.collect(LogRecord(key, LogValue(requests, traffic)))
        }
      }).name("DirectoriesAgg").uid("d9f33d46-ebd6-410e-9946-55fdce3e6ae0")
      // Serialize via the @BeanProperty getters into the JSON shape StarRocks expects.
      .map(JSONUtil.toJsonStr(_)).name("toJsonStr").uid("997989ed-353b-40ba-a4ab-0b53abccb84a")
      .addSink(SinkUtil.starRocksJsonString(parameterTool)).setParallelism(parameterTool.getInt(sinkParallel, 1)).name("starRocks").uid("b577b0d2-7f2a-4361-81c4-5c8476366434")
    StreamEnv.execute(parameterTool)
  }

}

