package com.shujia.rec.compute

import com.shujia.rec.common.Constants
import com.shujia.rec.entry.CaseClass.{CountEntry, LogEntry}
import com.shujia.rec.funaction.{CountProcessWindowFunction, TopNKeyedProcessFunction}
import com.shujia.rec.sink.RedisSink
import com.shujia.rec.util.{KafkaUtil, LogUtil}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.windowing.time.Time
object TopNProduce {

  /**
    * Flink job: computes the top-10 hottest products over the last minute,
    * refreshed every 5 seconds, ranked by per-product event counts.
    * All windowing is driven by event time extracted from each log record.
    */
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Drive all windows/timers by event time rather than processing time.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    env.setParallelism(1)

    // Kafka consumer for the raw log topic; group id "TopNProduce".
    val consumer = KafkaUtil.getKafaSoure("TopNProduce", Constants.KAFKA_TOPIC)
    // Consume from the latest offsets only — historical records are skipped.
    consumer.setStartFromLatest()

    val rawLogs = env.addSource(consumer)
    rawLogs.print() // debug: echo every raw record to stdout

    // Bounded out-of-orderness watermark; event timestamp comes from the log's ts field.
    val watermarkAssigner =
      new BoundedOutOfOrdernessTimestampExtractor[LogEntry](
        Time.seconds(Constants.LOGTOHBASE_EVENT_MAX_TIME_OUT)) {
        override def extractTimestamp(element: LogEntry): Long = element.ts
      }

    val topN = rawLogs
      .filter(line => LogUtil.verifyLog(line)) // discard malformed records
      .map(line => LogUtil.toEntry(line))      // parse each line into a LogEntry
      .assignTimestampsAndWatermarks(watermarkAssigner)
      .keyBy(_.proId)
      .timeWindow(Time.minutes(1), Time.seconds(5)) // 1-minute window, sliding every 5 seconds
      .process(new CountProcessWindowFunction)      // per-product count for each window
      .keyBy(_.end)                                 // re-key by window end so ranking is per window
      .process(new TopNKeyedProcessFunction)        // emit the top-10 products of each window

    // Persist each window's ranking to Redis.
    topN.addSink(new RedisSink)

    env.execute("TopNProduce")
  }
}
