package com.zhao.sparkstreaming.sgg.code.handler

import java.text.SimpleDateFormat
import java.time.format.DateTimeFormatter
import java.time.{Instant, ZoneId}
import java.util.Date

import com.zhao.sparkstreaming.sgg.code.Ads_log
import org.apache.spark.streaming.Minutes
import org.apache.spark.streaming.dstream.DStream

/**
 * Description: ad click counts per minute over a recent sliding window<br/>
 * Copyright (c) 2020, Zhao <br/>
 * This program is protected by copyright laws. <br/>
 * Date: 2020/12/7 17:19
 * Approach:
 * 1) open a time window over the stream
 * 2) within the window, reshape each record to ((adid, hm), count)
 * 3) group by ad id and sort each group by the hour:minute key
 *
 * @author 柒柒
 * @version : 1.0
 */

object LastHourAdCountHandler {

  // Thread-safe "HH:mm" formatter. DateTimeFormatter is immutable, unlike
  // SimpleDateFormat, which must never be shared across Spark executor task
  // threads (concurrent format() calls corrupt its internal state).
  // Uses the system default zone, matching SimpleDateFormat's behavior.
  private val hmFormatter: DateTimeFormatter =
    DateTimeFormatter.ofPattern("HH:mm").withZone(ZoneId.systemDefault())

  /**
   * Aggregates ad click counts per (adId, minute-of-day) over a sliding window,
   * then groups by ad id with each ad's per-minute counts sorted chronologically.
   *
   * @param filterAdsLogDStream stream of filtered ad click log records
   * @return DStream of (adId, List[(HH:mm, clickCount)]) with the list ordered by time
   */
  def getAdHourMintToCount(filterAdsLogDStream: DStream[Ads_log]): DStream[(String, List[(String, Long)])] = {

    // 1. Open a sliding window over the stream.
    // NOTE(review): the header doc says "last hour" but the window is 2 minutes —
    // presumably shortened for local testing; confirm the intended window length.
    val windowAdsLogDStream: DStream[Ads_log] = filterAdsLogDStream.window(Minutes(2))

    // 2. Reshape each record: Ads_log => ((adId, "HH:mm"), 1L)
    val adHmToOneDStream: DStream[((String, String), Long)] = windowAdsLogDStream.map { adsLog =>
      val hm: String = hmFormatter.format(Instant.ofEpochMilli(adsLog.timestamp))
      ((adsLog.adid, hm), 1L)
    }

    // 3. Sum the ones per (adId, HH:mm) key: ((adId, hm), 1L) => ((adId, hm), sum)
    val adHmToCountDStream: DStream[((String, String), Long)] = adHmToOneDStream.reduceByKey(_ + _)

    // 4. Re-key by ad id: ((adId, hm), sum) => (adId, (hm, sum))
    val adToHmCountDStream: DStream[(String, (String, Long))] = adHmToCountDStream.map {
      case ((adid, hm), count) => (adid, (hm, count))
    }

    // 5. Group by ad id and sort each ad's minute buckets chronologically
    //    ("HH:mm" strings sort lexicographically == chronologically within a day).
    adToHmCountDStream.groupByKey()
      .mapValues(_.toList.sortBy(_._1))
  }
}
