package com.atguigu.pro

import java.sql.Timestamp
import java.util.Properties

import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.state.{ListState, ListStateDescriptor}
import org.apache.flink.api.java.tuple.{Tuple, Tuple1}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.util.Collector

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

/**
 * @description: Hot-items analysis job: counts "pv" (page-view) events per item
 *               in 1-hour sliding windows (5-minute slide) and prints the
 *               top-N hottest items for each window.
 * @time: 2020/8/4 16:10
 * @author: baojinlong
 * */
object HotItems {
  def main(args: Array[String]): Unit = {
    // Streaming environment: event-time semantics, single parallel task so the
    // printed ranking output arrives in a deterministic order.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Read raw user-behavior records from a local CSV file.
    val rawLines: DataStream[String] = env.readTextFile("E:/big-data/FlinkTutorial/src/main/resources/UserBehavior-short.csv")

    // Kafka consumer configuration (alternative source; note that this stream
    // is created but never wired into the downstream pipeline in this example).
    val kafkaProps = new Properties()
    kafkaProps.setProperty("bootstrap.servers", "localhost:9092")
    kafkaProps.setProperty("group.id", "consumer-group")
    kafkaProps.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    kafkaProps.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    kafkaProps.setProperty("auto.offset.reset", "latest")
    val kafkaStream: DataStream[String] = env.addSource(new FlinkKafkaConsumer[String]("hostitems", new SimpleStringSchema, kafkaProps))

    // Parse each CSV line into a UserBehavior case class and assign ascending
    // event-time timestamps (input timestamps are in seconds; Flink wants millis).
    val behaviorStream: DataStream[UserBehavior] = rawLines
      .map { line =>
        val fields: Array[String] = line.split(",")
        UserBehavior(fields(0).toLong, fields(1).toLong, fields(2).toInt, fields(3), fields(4).toLong)
      }
      .assignAscendingTimestamps(_.timestamp * 1000)

    // Keep only page-view events, key by item id, and count views in a sliding
    // 1-hour window advancing every 5 minutes. CountAgg pre-aggregates each
    // element incrementally; WindowResultFunction is invoked once when the
    // window fires and wraps the final count with the window-end metadata.
    val windowedCounts: DataStream[ItemViewCount] = behaviorStream
      .filter(_.behavior.equals("pv"))
      .keyBy("itemId")
      .timeWindow(Time.hours(1), Time.minutes(5))
      .aggregate(new CountAgg(), new WindowResultFunction())

    // Re-key by window end so every count belonging to the same window lands on
    // the same task, then rank and emit the top-N items per window.
    val topN: DataStream[String] = windowedCounts
      .keyBy("windowEnd")
      .process(new TopNHotItems(5))

    topN.print("result")
    env.execute("hot items job")
  }
}

// Input record: one user-behavior event parsed from the CSV / Kafka feed.
// `timestamp` is in epoch seconds (multiplied by 1000 before use as event time).
case class UserBehavior(userId: Long, itemId: Long, categoryId: Int, behavior: String, timestamp: Long)

// Window aggregation result: the view count of one item for the window
// that ends at `windowEnd` (epoch milliseconds).
case class ItemViewCount(itemId: Long, windowEnd: Long, count: Long)

// Incremental aggregate that counts elements within each keyed window.
// Type parameters: IN = UserBehavior, ACC = Long, OUT = Long.
class CountAgg extends AggregateFunction[UserBehavior, Long, Long] {
  // Each window starts with a zeroed counter.
  override def createAccumulator(): Long = 0L

  // Called once per incoming element; the accumulator is the running count.
  override def add(in: UserBehavior, count: Long): Long = 1L + count

  // The window result is simply the accumulated count.
  override def getResult(count: Long): Long = count

  // Merging windows combine their partial counts by addition.
  override def merge(left: Long, right: Long): Long = right + left
}


// Full-window function that attaches window metadata to the pre-aggregated count.
// Types: IN = Long (the count), OUT = ItemViewCount, KEY = Tuple, W = TimeWindow.
// keyBy(field: String) produces a Java Tuple key, hence the Tuple key type here.
class WindowResultFunction extends WindowFunction[Long, ItemViewCount, Tuple, TimeWindow] {
  override def apply(key: Tuple, window: TimeWindow, aggregateResult: Iterable[Long],
                     collector: Collector[ItemViewCount]): Unit = {
    // keyBy("itemId") yields a one-field Java tuple holding the item id.
    val id: Long = key.asInstanceOf[Tuple1[Long]].f0
    // The incremental CountAgg emits exactly one value per fired window.
    val viewCount: Long = aggregateResult.iterator.next
    collector.collect(ItemViewCount(id, window.getEnd, viewCount))
  }
}


// Custom KeyedProcessFunction: collects all per-item counts belonging to one
// window (the stream is keyed by windowEnd) and, once the window is complete,
// ranks them and emits the top-n items as a formatted string.
class TopNHotItems(n: Int) extends KeyedProcessFunction[Tuple, ItemViewCount, String] {
  // Keyed ListState buffering every ItemViewCount seen for the current windowEnd.
  lazy val itemCountListState: ListState[ItemViewCount] = getRuntimeContext.getListState(new ListStateDescriptor[ItemViewCount]("itemcount-list", classOf[ItemViewCount]))


  override def processElement(value: ItemViewCount, context: KeyedProcessFunction[Tuple, ItemViewCount, String]#Context, collector: Collector[String]): Unit = {
    // Buffer each incoming count in state.
    itemCountListState.add(value)
    // Register an event-time timer at windowEnd + 100 ms. Timers are
    // de-duplicated per timestamp, so re-registering for the same window is harmless.
    context.timerService.registerEventTimeTimer(value.windowEnd + 100)

  }

  // Fired when the watermark passes windowEnd + 100: by then all counts for the
  // window have arrived, so we can sort and emit the ranking.
  override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[Tuple, ItemViewCount, String]#OnTimerContext, out: Collector[String]): Unit = {
    // Copy the state contents into a local buffer. Use the explicit
    // JavaConverters (.asScala) instead of the deprecated implicit
    // scala.collection.JavaConversions (removed in Scala 2.13).
    import scala.collection.JavaConverters._
    val allItemsCountList: ListBuffer[ItemViewCount] = ListBuffer()
    itemCountListState.get().asScala.foreach(allItemsCountList += _)

    // Sort by count descending and keep only the top n entries.
    val sortedItemCountList: ListBuffer[ItemViewCount] = allItemsCountList.sortBy(_.count)(Ordering.Long.reverse).take(n)
    // Clear the state for this window key; it is no longer needed.
    itemCountListState.clear()
    // Format the ranking into a human-readable string for console monitoring.
    val result = new mutable.StringBuilder()
    result.append("时间:").append(new Timestamp(timestamp - 100)).append("\n")
    for (i <- sortedItemCountList.indices) {
      val currentItemCount: ItemViewCount = sortedItemCountList(i)
      result.append("Top").append(i + 1).append(":")
        .append(" 商品id=").append(currentItemCount.itemId)
        .append(" 访问量=").append(currentItemCount.count)
        .append("\n")
    }
    result.append("===========\n\n")
    // Throttle output so it is readable on the console.
    // NOTE(review): Thread.sleep blocks the task thread and stalls processing
    // and checkpointing — acceptable only in a demo job.
    Thread.sleep(10000)
    out.collect(result.toString)

  }
}
