package com.baishancloud.log.fm

import org.apache.flink.api.common.state.{StateTtlConfig, ValueState, ValueStateDescriptor}
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.util.Collector

/**
 * Accumulates the aggregation results of preceding windows (kept in keyed
 * state) with each incoming [[Log]] element and emits the running total.
 *
 * @author ziqiang.wang
 * @date 2022/1/13 10:40
 */
class AggAndOut(parameterTool: ParameterTool) extends KeyedProcessFunction[LogKey, Log, Log] {

  // Keyed state holding the intermediate aggregate for the current key.
  var valueState: ValueState[LogValue] = _

  /**
   * Builds the state TTL configuration and initializes the value-state handle.
   * TTL duration (in hours) comes from the `stateTTL` job parameter, default 1.
   */
  override def open(parameters: Configuration): Unit = {
    val ttlConfig: StateTtlConfig = StateTtlConfig
      .newBuilder(Time.hours(parameterTool.getLong(stateTTL, 1)))
      .updateTtlOnCreateAndWrite()
      .neverReturnExpired()
      .cleanupFullSnapshot()
      .build()
    // NOTE: the descriptor name identifies this state in savepoints — do not rename.
    val descriptor: ValueStateDescriptor[LogValue] =
      new ValueStateDescriptor[LogValue]("中间聚合结果", TypeInformation.of(classOf[LogValue]))
    descriptor.enableTimeToLive(ttlConfig)
    valueState = getRuntimeContext.getState(descriptor)
  }

  /**
   * Merges the incoming element with the stored intermediate aggregate and
   * emits the updated total. The first element seen for a key (or after the
   * state expired) is stored and forwarded unchanged.
   */
  override def processElement(value: Log, ctx: KeyedProcessFunction[LogKey, Log, Log]#Context, out: Collector[Log]): Unit = {
    Option(valueState.value()) match {
      case None =>
        // No prior aggregate for this key: seed the state with this element.
        valueState.update(value.getValue)
        out.collect(value)
      case Some(prev) =>
        val merged = LogValue(
          prev.request + value.request,
          prev.firstBagTime + value.firstBagTime,
          prev.traffic + value.traffic,
          prev.timeResponse + value.timeResponse
        )
        val aggregated = Log(value.getKey, merged)
        valueState.update(aggregated.getValue)
        out.collect(aggregated)
    }
  }
}
