package com.mjf.networkflow_analysis

import com.mjf.dim.{PvCount, UserBehavior}
import org.apache.flink.api.common.functions.{AggregateFunction, MapFunction}
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

import scala.util.Random

/**
 * Hourly page-view (PV) totals over the UserBehavior stream.
 *
 * Records are first spread across random keys so the per-window
 * pre-aggregation is balanced over parallel subtasks (avoiding the skew
 * of a single "pv" key); a second keyBy on the window end then merges
 * the partial counts back into one total per window.
 */
object PageView {
  def main(args: Array[String]): Unit = {

    // Set up the streaming environment with event-time semantics.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(4)
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Read the raw CSV records from file.
    val rawLines: DataStream[String] = env.readTextFile("D:\\coding\\idea\\UserBehaviorAnalysis\\HotItemsAnalysis\\src\\main\\resources\\UserBehavior.csv")

    // Parse each CSV line into a UserBehavior sample and derive watermarks
    // from the ascending event timestamps (seconds -> milliseconds).
    val behaviorStream: DataStream[UserBehavior] = rawLines
      .map { line =>
        val fields: Array[String] = line.split(",")
        UserBehavior(fields(0).toLong, fields(1).toLong, fields(2).toInt, fields(3), fields(4).toLong)
      }
      .assignAscendingTimestamps(_.timestamp * 1000L)

    // Pre-aggregate per random key over one-hour tumbling windows.
    val partialCounts: DataStream[PvCount] = behaviorStream
      .filter(_.behavior == "pv")
//      .map(data => ("pv", 1L))  // single-key variant ("pv", count) — skewed
      .map(new MyMapper())  // custom mapper: spread records over random keys
      .keyBy(_._1)
      .timeWindow(Time.hours(1))
      .aggregate(new PvCountAgg(), new PvCountResult())

    // Merge the per-key partial counts of each window into the final total.
    val totalCounts: DataStream[PvCount] = partialCounts
      .keyBy(_.windowEnd)
      .process(new TotalPvCountResult())

    totalCounts.print()

    env.execute("PageView")

  }
}

// Custom MapFunction: tags every record with a random key in [0, 8) and a
// unit count, so the downstream windowed aggregation is spread over
// parallel subtasks instead of piling up behind a single key.
class MyMapper extends MapFunction[UserBehavior, (String, Long)] {
  override def map(value: UserBehavior): (String, Long) = {
    val bucket: Int = Random.nextInt(8)
    (bucket.toString, 1L)
  }
}

// Custom KeyedProcessFunction that merges the per-random-key partial counts
// of one window (keyed by windowEnd) into a single total PvCount.
class TotalPvCountResult extends KeyedProcessFunction[Long, PvCount, PvCount] {
  // Running sum of all partial counts seen so far for the current windowEnd key.
  // NOTE(review): value() yields null for uninitialized state; Scala unboxes
  // that null Long to 0L, which the first update below relies on — confirm
  // this implicit default is intentional.
  lazy val totalCountState: ValueState[Long] = getRuntimeContext.getState(new ValueStateDescriptor[Long]("total-count", classOf[Long]))

  override def processElement(value: PvCount, ctx: KeyedProcessFunction[Long, PvCount, PvCount]#Context, out: Collector[PvCount]): Unit = {
    totalCountState.update(totalCountState.value() + value.count) // fold this partial count into the running total
    // Register an event-time timer at windowEnd + 1; re-registering the same
    // timestamp for each element is a no-op, so one timer fires per window.
    ctx.timerService().registerEventTimeTimer(value.windowEnd + 1)
  }

  override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[Long, PvCount, PvCount]#OnTimerContext, out: Collector[PvCount]): Unit = {
    // When the timer fires the watermark has passed windowEnd, so every
    // partition's partial count has arrived — emit the total and reset.
    out.collect(PvCount(ctx.getCurrentKey, totalCountState.value()))
    totalCountState.clear()
  }
}


/**
 * Incremental per-window page-view counter.
 *
 * Input is a (key, count) tuple; accumulator and output are the running
 * count. Folds in the tuple's own count field instead of a hard-coded +1,
 * so the aggregate stays correct even if an upstream mapper emits
 * pre-counted tuples. With MyMapper's (key, 1L) output, behavior is
 * unchanged.
 */
class PvCountAgg extends AggregateFunction[(String, Long), Long, Long] {
  // Each window starts from a zero count.
  override def createAccumulator(): Long = 0L

  // Add the element's carried count (was `accumulator + 1`, which silently
  // ignored the count field of the tuple).
  override def add(value: (String, Long), accumulator: Long): Long = accumulator + value._2

  override def getResult(accumulator: Long): Long = accumulator

  // Required for merging window assigners (e.g. session windows).
  override def merge(a: Long, b: Long): Long = a + b
}

// WindowFunction that wraps the pre-aggregated count of a window into a
// PvCount stamped with the window's end time.
class PvCountResult extends WindowFunction[Long, PvCount, String, TimeWindow] {
  override def apply(key: String, window: TimeWindow, input: Iterable[Long], out: Collector[PvCount]): Unit = {
    // The upstream AggregateFunction leaves exactly one value per window.
    val windowCount: Long = input.iterator.next()
    out.collect(PvCount(window.getEnd, windowCount))
  }
}