package com.baishancloud.log.traffic

import com.baishancloud.log.common.entity.DomainEntity
import com.baishancloud.log.common.util.ExternalDataFetchUtil
import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction
import org.apache.flink.util.Collector
import org.slf4j.{Logger, LoggerFactory}

import java.util.{Timer, TimerTask}
import scala.collection.mutable

/**
 * Converts the domain name inside each result object to its numeric id.
 *
 * The broadcast side delivers a refreshed domain -> [[DomainEntity]] mapping;
 * the element side looks each record's domain up in that mapping and emits a
 * [[Result]] carrying the domain id instead of the raw domain string.
 *
 * @author ziqiang.wang
 * @date 2021/12/22 16:12
 */
class ResultParse extends BroadcastProcessFunction[LogRecord, mutable.Map[String, DomainEntity], Result] {

  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[ResultParse])
  // Latest domain metadata: seeded from a local file in open(), then replaced
  // wholesale by each non-empty broadcast update.
  var domainInfo: mutable.Map[String, DomainEntity] = mutable.Map[String, DomainEntity]()
  // Index of this parallel subtask; used only for log context.
  var subtaskIndex: Int = -1

  /**
   * Bootstraps the domain mapping from a local file so that records arriving
   * before the first broadcast update can still be resolved.
   */
  override def open(parameters: Configuration): Unit = {
    subtaskIndex = getRuntimeContext.getIndexOfThisSubtask
    ExternalDataFetchUtil.domainInfoFetchFromFile(domainInfo)
    LOGGER.info(s"并行度${subtaskIndex}初始化域名信息成功，数量为:${domainInfo.size}")
  }

  /**
   * Maps one log record to a [[Result]], translating its domain to an id.
   * Unknown domains fall back to DomainEntity.DEFAULT's id rather than failing.
   */
  override def processElement(value: LogRecord, ctx: BroadcastProcessFunction[LogRecord, mutable.Map[String, DomainEntity], Result]#ReadOnlyContext, out: Collector[Result]): Unit = {
    out.collect(
      Result(
        value.time_at_5min,
        domainInfo.getOrElse(value.domain, DomainEntity.DEFAULT).domainId,
        value.traffic
      )
    )
  }

  /**
   * Replaces the local domain mapping with the broadcast one. Empty updates
   * are ignored so a faulty broadcast cannot wipe the current mapping.
   */
  override def processBroadcastElement(value: mutable.Map[String, DomainEntity], ctx: BroadcastProcessFunction[LogRecord, mutable.Map[String, DomainEntity], Result]#Context, out: Collector[Result]): Unit = {
    if (value.nonEmpty) {
      domainInfo = value
      // Fix: the message says "size" but previously interpolated the whole map,
      // which could flood the log on every update; log the entry count instead.
      LOGGER.info(s"接收到广播的域名信息数据，大小为：${domainInfo.size}")
    }
  }
}
