package com.baishancloud.log.traffic

import cn.hutool.json.JSONUtil
import com.baishancloud.log.common.entity.DomainEntity
import com.baishancloud.log.common.env.StreamEnv
import com.baishancloud.log.common.sink.SinkUtil
import com.baishancloud.log.common.source.{BroadcastUtil, SourceUtil}
import com.baishancloud.log.common.util.TimeUtil
import com.baishancloud.log.format.scala.Origin
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.datastream.BroadcastStream
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time

import java.util.Objects
import scala.collection.mutable

/**
 * Flink streaming job that merges fog-dispatch logs with cache logs whose URL
 * carries the `bsxdisp=se` marker, buckets their traffic into 5-minute slots
 * per domain, aggregates it in a tumbling processing-time window, enriches the
 * result with broadcast domain metadata and sinks the JSON rows to StarRocks.
 *
 * NOTE(review): every operator carries a fixed `uid` — do not restructure the
 * operator chain, or savepoint/state compatibility is lost.
 *
 * @author ziqiang.wang
 * @date 2021/12/22 15:20
 */
object Toutiao302Traffic extends Serializable {

  def main(args: Array[String]): Unit = {
    val params: ParameterTool = ParameterTool.fromArgs(args)
    println("输入参数：" + params.toMap.toString)
    StreamEnv.builder(params).build()
    //    val source: DataStream[String] = SourceUtil.fileOnlyValue(params)

    // Broadcast side input: domain -> metadata, consumed later by ResultParse.
    val domainBroadcast: BroadcastStream[mutable.Map[String, DomainEntity]] = BroadcastUtil.domainInfo()

    // Fog-dispatch logs: parsed only, intentionally not filtered further.
    val fogJinhua: DataStream[String] = SourceUtil.kafkaOnlyValue(params, "-fog-金华").uid("514c01e9-a1b9-4d64-bc3e-b8995568c73e")
    val fogXique: DataStream[String] = SourceUtil.kafkaOnlyValue(params, "-fog-喜鹊").uid("d5f649d8-ab10-4e3f-b223-2d9e87f15d60")
    val fogOrigins: DataStream[Origin] = fogJinhua.union(fogXique)
      .rebalance
      .map(line => Origin.parse(line).orNull).name("Origin").uid("632f7648-27f-43b9-8d9a-a21e9cb42ff5")
      .filter(origin => origin != null).name("!=null").uid("8986f936-5561-46c3-845-5168ae0d52e5")

    // Cache logs: same parsing, plus a job-specific record filter.
    val cacheJinhua: DataStream[String] = SourceUtil.kafkaOnlyValue(params, "-cache-金华").uid("73ebe9c3-2a2e-487a-b9a2-a4c853599f51")
    val cacheXique: DataStream[String] = SourceUtil.kafkaOnlyValue(params, "-cache-喜鹊").uid("7f3d5c36-397d-4892-8c25-109bbba93fce")
    val cacheOrigins: DataStream[Origin] = cacheJinhua.union(cacheXique)
      .rebalance
      .map(line => Origin.parse(line).orNull).name("Origin").uid("80db7029-0365-e5d-b62e-dc274dd84f9d")
      .filter(origin => origin != null).name("!=null").uid("ef530ac3-23d2-467e-b9a-dd9fe61e786e")
      .filter { record =>
        // Keep only records with xPeer == "-" whose URL contains the bsxdisp=se marker.
        Objects.equals(record.xPeer(), "-") && record.url() != null && record.url().contains("bsxdisp=se")
      }.name("someFilter").uid("37f7d6a3-c340-4145-afef-022fb96b00e")

    // Merge both streams, pre-aggregate locally, then window-reduce per key,
    // enrich with the broadcast domain info and sink as JSON.
    val merged: DataStream[Origin] = fogOrigins.union(cacheOrigins)
    merged
      .map { origin =>
        LogRecord(
          // Floor the event time to a 5-minute (300 s) bucket string.
          TimeUtil.timeAtString(origin.datetimeOfUTC().toEpochSecond, 5 * 60),
          origin.domain(),
          origin.traffic()
        )
      }.name("LogRecordParse").uid("276c92b4-6c28-4ec0-98e5-7430e962e9a")
      .process(new MiniBatchProcess(params)).name("localAgg").uid("295238f2-b33e-4b52-a30-fe18314ae077")
      .keyBy(_.getKey)
      .window(TumblingProcessingTimeWindows.of(Time.minutes(params.getLong(windowSize, 5))))
      .reduce { (left, right) =>
        LogRecord(left.time_at_5min, left.domain, left.traffic + right.traffic)
      }.name("trafficAgg").uid("a29ea3a-efeb-4319-87fd-ad8e367d820a")
      .connect(domainBroadcast)
      .process(new ResultParse).name("ResultParse").uid("9314d3c-dc5d-4e76-ba0f-829339b5425")
      .map(row => JSONUtil.toJsonStr(row)).name("toJsonStr").uid("89f8d867-cdab-4d6c-b99d-dffb387bf55")
      .addSink(SinkUtil.starRocksJsonString(params)).setParallelism(1).name("starRocks").uid("e0c3205-c718-483e-8839-d04a35340fbb")
    //      .print("结果数据：")
    StreamEnv.execute(params)
  }

}
