package com.atguigu.pro

import java.net.URL

import org.apache.flink.api.common.functions.{AggregateFunction, MapFunction}
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

import scala.util.Random

/**
 *
 * @description: Page-view (PV) count statistics
 * @time: 2021-03-30 21:48
 * @author: baojinlong
 * */

// Input record parsed from one CSV line: a single user-behavior event.
// `timestamp` is in seconds (it is multiplied by 1000 downstream when
// assigning event-time timestamps).
case class UserBehavior2(userId: Long, itemId: Long, categoryId: Int, behavior: String, timestamp: Long)

// Output of the PV statistic: the page-view count for the
// tumbling window ending at `windowEnd` (epoch millis).
case class PvCount(windowEnd: Long, count: Long)

object PageView {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Use event time so windows are driven by the timestamps carried in the data.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    // Parallelism 4 so the random-key salting below actually spreads load
    // across several subtasks (use 1 to make printed output deterministic).
    env.setParallelism(4)

    // Read the input from the classpath instead of a machine-specific absolute
    // path, so the job runs on any checkout. (The previous hard-coded
    // "C:/codes/..." path left this `resource` lookup unused.)
    val resource: URL = getClass.getResource("/UserBehavior-short.csv")
    val inputStream: DataStream[String] = env.readTextFile(resource.getPath)

    // Parse CSV lines into the case class and assign ascending event-time
    // timestamps. The input is sorted by time; timestamps are in seconds,
    // Flink expects milliseconds, hence * 1000.
    val dataStream: DataStream[UserBehavior2] =
      inputStream
        .map(data => {
          val dataArray: Array[String] = data.split(",")
          UserBehavior2(dataArray(0).toLong, dataArray(1).toLong, dataArray(2).toInt, dataArray(3), dataArray(4).toLong)
        })
        .assignAscendingTimestamps(_.timestamp * 1000)

    // A single dummy key (e.g. the constant "pv") would funnel every record to
    // one subtask and skew the load. Instead MyPageViewMapper salts each record
    // with a small random key, so the per-window pre-aggregation is spread over
    // all subtasks; the per-key partial counts are merged per window below.
    val resultStream: DataStream[PvCount] = dataStream
      .filter(x => "pv".equals(x.behavior))
      .map(new MyPageViewMapper)
      .keyBy(_._1)
      .timeWindow(Time.hours(1)) // 1-hour tumbling window
      .aggregate(new PvCountAgg, new PvCountWindowResult)

    // Merge the salted partial counts per window: key by windowEnd and sum the
    // partials in a keyed process function. A plain .sum("count") would emit an
    // updated intermediate result for every incoming partial instead of one
    // final total per window.
    val totalPvStream: DataStream[PvCount] = resultStream
      .keyBy(_.windowEnd)
      .process(new MyTotalPvCountResult)

    totalPvStream.print("totalPvStream")
    env.execute("pvJobTest")
  }
}


// Incremental pre-aggregation function: IN = (saltedKey, count), ACC = Long, OUT = Long.
// Folds the count carried in the tuple (in._2) instead of a hard-coded +1, so the
// aggregate stays correct even if upstream ever emits counts other than 1
// (behavior-identical today, since the mapper always emits 1).
class PvCountAgg extends AggregateFunction[(String, Long), Long, Long] {
  // Every window's accumulator starts at zero.
  override def createAccumulator(): Long = 0L

  // Fold one record's count into the accumulator.
  override def add(in: (String, Long), acc: Long): Long = acc + in._2

  // The accumulator itself is the window result.
  override def getResult(acc: Long): Long = acc

  // Required for merging windows; plain addition of partial counts suffices.
  override def merge(acc: Long, acc1: Long): Long = acc + acc1
}

// Window function applied after pre-aggregation: wraps the single aggregated
// value together with the window's end timestamp into a PvCount.
// Type params: IN = Long, OUT = PvCount, KEY = String, W = TimeWindow.
class PvCountWindowResult extends WindowFunction[Long, PvCount, String, TimeWindow] {
  override def apply(key: String, window: TimeWindow, input: Iterable[Long], out: Collector[PvCount]): Unit = {
    // The upstream aggregate emits exactly one value per window, so the
    // iterable holds a single element.
    val windowCount: Long = input.head
    out.collect(PvCount(window.getEnd, windowCount))
  }
}

// Salting mapper: assigns each record a small random key so keyBy spreads the
// pre-aggregation across subtasks instead of hot-spotting a single task.
// The original used Random.nextString(10), which (a) can produce strings with
// unpaired surrogate characters and (b) makes virtually every record a distinct
// key, so each windowed key holds one element — defeating pre-aggregation and
// bloating per-key window state. A small bounded key space (0..7) balances the
// load while keeping state small.
class MyPageViewMapper extends MapFunction[UserBehavior2, (String, Long)] {
  override def map(t: UserBehavior2): (String, Long) = {
    (Random.nextInt(8).toString, 1L)
  }
}

// Merges the salted partial counts per window. Type params: K = windowEnd (Long),
// I = partial PvCount, O = final PvCount. Accumulates partials in keyed state and
// emits the grand total once the windowEnd+1 event-time timer fires.
class MyTotalPvCountResult extends KeyedProcessFunction[Long, PvCount, PvCount] {
  // Running total of all partial counts seen for the current windowEnd key.
  lazy val totalPvCountValueState: ValueState[Long] = getRuntimeContext.getState(new ValueStateDescriptor[Long]("totalPvCountValueState", classOf[Long]))

  // Invoked once per incoming partial PvCount.
  override def processElement(i: PvCount, context: KeyedProcessFunction[Long, PvCount, PvCount]#Context, collector: Collector[PvCount]): Unit = {
    // Fold this partial count into the running total for the window.
    val runningTotal: Long = totalPvCountValueState.value
    totalPvCountValueState.update(runningTotal + i.count)
    // Fire just after the window end; registering the same timestamp
    // repeatedly for a key is harmless (the timer fires once).
    context.timerService.registerEventTimeTimer(i.windowEnd + 1)
  }

  // Fires when the watermark passes windowEnd: all partials for the window
  // have arrived, so the state now holds the final total.
  override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[Long, PvCount, PvCount]#OnTimerContext, out: Collector[PvCount]): Unit = {
    val finalTotal: Long = totalPvCountValueState.value
    // The current key IS the windowEnd (the stream is keyed by it upstream).
    out.collect(PvCount(ctx.getCurrentKey, finalTotal))
    // Clear the state so per-window totals do not leak across windows.
    totalPvCountValueState.clear()
  }
}