package com.baishancloud.log.analyzer

import cn.hutool.json.JSONUtil
import com.baishancloud.log.analyzer.model.MediaType
import com.baishancloud.log.common.env.StreamEnv
import com.baishancloud.log.common.sink.SinkUtil
import com.baishancloud.log.common.source.SourceUtil
import com.baishancloud.log.common.util.StringUtil
import com.baishancloud.log.format.scala.Origin
import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.uaparser.scala.{CachingParser, Client}

import java.io.Serializable
import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, ZoneOffset}
import scala.language.postfixOps

/**
 * Flink streaming job that parses Netease access-log records, filters them by the
 * configured accounts' domain ids, and writes three windowed aggregations to StarRocks:
 * user-agent stats (1h buckets), content-type stats (5min buckets) and
 * domain-directory traffic (5min buckets).
 *
 * @author ziqiang.wang
 * @date 2021/11/10 10:59
 */
object NeteaseAnalyzer extends Serializable {

  /** Parameter name: accounts whose domain data is wanted; multiple accounts separated by commas. */
  val accounts: String = "accounts"
  /** Parameter name: output interval (window size) in seconds; defaults to 1 minute. */
  val dataOutInterval: String = "dataOutInterval"
  /** Parameter name: parallelism of the three sinks; defaults to 2. A single DB errors out above 100 concurrent writers. */
  val sinkParallel: String = "sinkParallel"
  // Directory prefixes designated by Netease; a request URI is attributed to the first matching prefix.
  private val neteaseDirectories = Array("/tpr/sc1live", "/tpr/wow/", "/tpr/sc2/", "/tpr/d3/", "/tpr/ovw/", "/tpr/pro/", "/tpr/Hero-Live-a/", "/tpr/hs/", "/hs-pod/")
  // DateTimeFormatter is immutable and thread-safe, so share a single instance instead of
  // rebuilding it for every record inside the per-record map functions. Accessed from the
  // anonymous MapFunctions as a static object member (same pattern as neteaseDirectories),
  // so it is not captured in the serialized closures.
  private val timeFormat: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

  /**
   * Validates the required command-line parameters, printing a hint for each missing one.
   *
   * @param parameterTool parsed command-line arguments
   * @return true when all required parameters are present
   */
  private def paramCheck(parameterTool: ParameterTool): Boolean = {
    val hasAccounts = parameterTool.has(accounts)
    if (!hasAccounts) {
      println(s"必须指定 --${accounts} 参数，表示需要哪个账户的域名数据")
    }
    hasAccounts
  }

  /**
   * Truncates a UTC epoch second down to the enclosing bucket boundary and formats it
   * as "yyyy-MM-dd HH:mm:ss" in UTC+8.
   *
   * @param epochSecond   record timestamp as UTC epoch seconds
   * @param bucketSeconds bucket width in seconds (3600 for hourly, 300 for 5-minute buckets)
   */
  private def bucketTime(epochSecond: Long, bucketSeconds: Long): String =
    LocalDateTime.ofEpochSecond(epochSecond - epochSecond % bucketSeconds, 0, ZoneOffset.ofHours(8)).format(timeFormat)

  def main(args: Array[String]): Unit = {
    val parameterTool: ParameterTool = ParameterTool.fromArgs(args)
    println(s"输入参数：${parameterTool.toMap.toString}")
    if (!paramCheck(parameterTool)) {
      throw new IllegalArgumentException("请根据提示信息检查输入参数是否准确")
    }
    StreamEnv.builder(parameterTool).build()
    // Two Kafka sources are unioned into a single raw-log stream.
    val source1: DataStream[String] = SourceUtil.kafkaOnlyValue(parameterTool, "1").uid("da175332-dee9-4e8b-bb43-d9cb4b52bcc6")
    val source2: DataStream[String] = SourceUtil.kafkaOnlyValue(parameterTool, "2").uid("2cdd9839-ffe8-4b2f-aecc-9b32b4b7d50c")
    val source: DataStream[String] = source1.union(source2)
    // Parse each raw line into an Origin, drop unparseable lines, then keep only the
    // records belonging to the configured accounts' domains (paired with the domain id).
    val originAndId: DataStream[(Int, Origin)] = source
      .map(Origin.parse(_).orNull).name("Origin").uid("856f9d56-996c-4fda-912a-fbc72321e82f")
      .filter(_ != null).name("!=null").uid("3152e2d9-a832-4352-bbb3-e15d2863cd3a")
      .process(new DomainIdFilter(parameterTool)).name("DomainIdFilter").uid("72b323ec-b999-4c0b-a61d-26432861767d")

    //statis_user_agent_1hour
    userAgentAgg(parameterTool, originAndId)
    //statis_content_type_5min
    contentTypeAgg(parameterTool, originAndId)
    //statis_domain_directory_traffic_5min
    domainDirectoryAgg(parameterTool, originAndId)
    StreamEnv.execute(parameterTool)
  }

  /**
   * Aggregates requests and bytes sent per (OS family, device family, domain id, hour bucket)
   * and sinks the result as JSON into the statis_user_agent_1hour StarRocks table.
   */
  private def userAgentAgg(parameterTool: ParameterTool, originAndId: DataStream[(Int, Origin)]): Unit = {
    originAndId
      .map(new MapFunction[(Int, Origin), (UserAgentKey, UserAgentValue)] {
        // Cached UA parser; parsing is expensive and UAs repeat heavily.
        val cachingParser: CachingParser = CachingParser.default(100000)

        override def map(value: (Int, Origin)): (UserAgentKey, UserAgentValue) = {
          val origin: Origin = value._2
          // Hourly (3600s) buckets, formatted in UTC+8.
          val timeAt: String = bucketTime(origin.datetimeOfUTC().toEpochSecond, 3600)
          val client: Client = cachingParser.parse(origin.userAgent())
          (UserAgentKey(StringUtil.subString(client.os.family, 100), StringUtil.subString(client.device.family, 100), value._1, timeAt), UserAgentValue(1L, origin.bytesSent()))
        }
      }).name("UserAgent:k-v").uid("beb01b3c-620c-4ebb-8f7d-a28fc849ad5f")
      .keyBy(_._1)
      .window(TumblingProcessingTimeWindows.of(Time.seconds(parameterTool.getLong(dataOutInterval, 60))))
      // Sum each field of the values that share a key.
      .reduce((x, y) => (x._1, UserAgentValue(x._2.request + y._2.request, x._2.bytesSent + y._2.bytesSent))).name("userAgentAgg").uid("4636b9e5-f2b3-479e-b927-f581ecf0e74d")
      .map(x => JSONUtil.toJsonStr(UserAgent(x._1, x._2))).name("toJsonStr").uid("fb13b811-49f2-4424-8d41-a26dc8bc8747")
      .addSink(SinkUtil.starRocksJsonString(parameterTool, "1")).setParallelism(parameterTool.getInt(sinkParallel, 2)).name("statis_user_agent_1hour").uid("cf97483f-02d7-4100-b15a-0a0eb4fc9201")
  }


  /**
   * Aggregates requests, bytes sent and response time per (content type, domain id,
   * 5-minute bucket) and sinks the result as JSON into the statis_content_type_5min
   * StarRocks table.
   */
  private def contentTypeAgg(parameterTool: ParameterTool, originAndId: DataStream[(Int, Origin)]): Unit = {
    originAndId
      .map(new MapFunction[(Int, Origin), (ContentTypeKey, ContentTypeValue)] {
        override def map(value: (Int, Origin)): (ContentTypeKey, ContentTypeValue) = {
          val origin: Origin = value._2
          // Unparseable content types are bucketed under "other/other".
          val mediaType: MediaType = MediaType.parse(origin.contentType()).getOrElse(MediaType("other", "other", List(), "other/other"))
          val strMediaType = s"${mediaType.`type`}/${mediaType.subtype}"
          // 5-minute (300s) buckets, formatted in UTC+8.
          val timeAt: String = bucketTime(origin.datetimeOfUTC().toEpochSecond, 300)
          (ContentTypeKey(StringUtil.subString(strMediaType, 100), value._1, timeAt), ContentTypeValue(1L, origin.bytesSent(), origin.responseTime()))
        }
      }).name("ContentType:k-v").uid("69e4fe42-2313-4a6a-b3ce-a4ed9aeccff0")
      .keyBy(_._1)
      .window(TumblingProcessingTimeWindows.of(Time.seconds(parameterTool.getLong(dataOutInterval, 60))))
      // Sum each field of the values that share a key.
      .reduce((x, y) => (x._1, ContentTypeValue(x._2.requests + y._2.requests, x._2.bytesSent + y._2.bytesSent, x._2.responseTime + y._2.responseTime))).name("contentTypeAgg").uid("8685295b-660c-44e3-9d75-3c10752d89ef")
      .map(x => JSONUtil.toJsonStr(ContentType(x._1, x._2))).name("toJsonStr").uid("902b87e8-81b7-425a-91b4-d8ceb87f2958")
      .addSink(SinkUtil.starRocksJsonString(parameterTool, "2")).setParallelism(parameterTool.getInt(sinkParallel, 2)).name("statis_content_type_5min").uid("695f9d8e-cfd1-4a5a-ab6f-41cd5f73ee87")
  }

  /**
   * Aggregates bytes sent per (domain id, Netease directory prefix, 5-minute bucket)
   * and sinks the result as JSON into the statis_domain_directory_traffic_5min
   * StarRocks table. URIs matching none of the known prefixes fall into "other".
   */
  private def domainDirectoryAgg(parameterTool: ParameterTool, originAndId: DataStream[(Int, Origin)]): Unit = {
    originAndId
      .map(new MapFunction[(Int, Origin), (DomainDirectoryKey, DomainDirectoryValue)] {
        override def map(value: (Int, Origin)): (DomainDirectoryKey, DomainDirectoryValue) = {
          val origin: Origin = value._2
          // 5-minute (300s) buckets, formatted in UTC+8.
          val timeAt: String = bucketTime(origin.datetimeOfUTC().toEpochSecond, 300)
          // First directory prefix the URI starts with wins; no match falls back to "other".
          val directory: String = neteaseDirectories.find(origin.uriWithoutQuery().startsWith(_)).getOrElse("other")
          (DomainDirectoryKey(value._1, directory, timeAt), DomainDirectoryValue(origin.bytesSent()))
        }
      }).name("DomainDirectory:k-v").uid("37fe01f8-a368-4761-86b8-87e5d6e2ab55")
      .keyBy(_._1)
      .window(TumblingProcessingTimeWindows.of(Time.seconds(parameterTool.getLong(dataOutInterval, 60))))
      // Sum each field of the values that share a key.
      .reduce((x, y) => (x._1, DomainDirectoryValue(x._2.bytesSent + y._2.bytesSent))).name("domainDirectoryAgg").uid("b5b0ecfe-3de2-4932-b553-968f7c318d35")
      .map(x => JSONUtil.toJsonStr(DomainDirectory(x._1, x._2))).name("toJsonStr").uid("542cfc14-1212-45d3-9503-401e3d0c0a9a")
      // Fixed typo in the operator display name ("direcctory" -> "directory"); the uid is
      // unchanged, so checkpoint/savepoint state compatibility is preserved.
      .addSink(SinkUtil.starRocksJsonString(parameterTool, "3")).setParallelism(parameterTool.getInt(sinkParallel, 2)).name("statis_domain_directory_traffic_5min").uid("e5d27846-4da8-4dc4-9fc4-030b0d79e566")
  }

}
