package com.baishancloud.log.foglog.jh

import com.baishancloud.log.common.util.other.DataTimeFilterUtil
import com.baishancloud.log.foglog.{filterDomains, filterSecond, secondFilterDomains}
import com.baishancloud.log.format.scala.Origin
import org.apache.flink.api.common.functions.RichFilterFunction
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.Configuration

import java.util.{Objects, Timer, TimerTask}

/**
 * Applies a maximum-timestamp filter to the configured domains. This filter is
 * intended for jobs running in the Jinhua (金华) region.
 *
 * @author ziqiang.wang
 * @date 2022/2/15 11:47
 */
class DomainFilterJh(parameterTool: ParameterTool) extends RichFilterFunction[(String, Origin)] {

  // Only records whose domain is in this set are kept.
  var allowDomains: Set[String] = _
  // Oldest accepted event time, in epoch seconds.
  // NOTE(review): initialized to now - 1 day, but the timer in open() resets it to
  // now - 10 days after 30 minutes — confirm which lateness window is intended.
  var allowOldest: Long = System.currentTimeMillis() / 1000 - 24 * 3600L

  // Domains subject to the additional max-timestamp filter, and that max (epoch seconds).
  var filterDomainsSet: Set[String] = _
  var maxSecond: Long = Long.MaxValue

  // Daemon timer that periodically refreshes allowOldest; cancelled in close() so the
  // thread does not leak across task restarts.
  private var refreshTimer: Timer = _

  override def open(parameters: Configuration): Unit = {
    allowDomains = parameterTool.get(filterDomains, "").split(",").toSet

    refreshTimer = new Timer("更新最大延迟时间", true)
    refreshTimer.schedule(new TimerTask {
      override def run(): Unit = {
        allowOldest = System.currentTimeMillis() / 1000 - 10 * 24 * 3600L
      }
    }, 30 * 60 * 1000, 30 * 60 * 1000)

    // NOTE(review): unlike the call above, no default is supplied here — an unset
    // `secondFilterDomains` yields null and an NPE on split; confirm fail-fast is intended.
    filterDomainsSet = parameterTool.get(secondFilterDomains).split(",").toSet
    maxSecond = parameterTool.getLong(filterSecond)
  }

  override def close(): Unit = {
    // Stop the refresh timer created in open(); guard against close-before-open.
    if (refreshTimer != null) {
      refreshTimer.cancel()
    }
    super.close()
  }

  // Per-record counter driving the sampling of v9.douyinvod.com traffic.
  var count: Long = 0L

  /**
   * Keeps only records of the allowed domains whose event time is not older than
   * `allowOldest`. Additionally, for domains in `filterDomainsSet`, the record's
   * timestamp must be strictly below `maxSecond` (the maximum consumed time for
   * jobs running in the Jinhua region).
   *
   * @param value (key, Origin) pair from upstream
   * @return true when the record should be kept
   */
  override def filter(value: (String, Origin)): Boolean = {
    val domain: String = value._2.domain()
    // Sample v9.douyinvod.com traffic; every other domain passes this check.
    // NOTE(review): the original comment claimed 1/1000 sampling, but the modulus is
    // 100 (i.e. 1/100) — confirm the intended rate before changing either side.
    val sample: Boolean =
      if (domain == "v9.douyinvod.com") {
        count += 1
        count % 100 == 0
      } else {
        true
      }
    allowDomains.contains(domain) &&
      value._2.datetimeOfUTC().toEpochSecond >= allowOldest &&
      sample &&
      DataTimeFilterUtil.domainSecondFilterMax(value._2, filterDomainsSet, maxSecond)
  }
}
