package org.niit.service

import org.apache.spark.streaming.dstream.DStream
import org.niit.bean.AdClickData
import org.niit.dao.AreaAdDao

import java.text.SimpleDateFormat
import java.util.Date

/**
 * Date:2025/6/5
 * Author：Ys
 * Description:
 */
class AreaCityAdCountService {

  /**
   * Real-time statistics: total click traffic per day / area / city / ad,
   * persisted to MySQL through [[AreaAdDao]].
   *
   * Approach: map each [[AdClickData]] to Key = (day, area, city, ad),
   * Value = 1, then aggregate equal keys with `reduceByKey` and write each
   * aggregated row to the database.
   *
   * @param data the live stream of ad-click events
   */
  def dataAnalysis(data: DStream[AdClickData]): Unit = {

    // Build ((day, area, city, ad), 1) pairs.
    // SimpleDateFormat is expensive to construct and not thread-safe, so
    // create ONE instance per partition instead of one per record.
    val mapDS: DStream[((String, String, String, String), Int)] =
      data.mapPartitions { iter =>
        val sdf = new SimpleDateFormat("yyyy-MM-dd")
        iter.map { line =>
          val day = sdf.format(new Date(line.ts.toLong))
          ((day, line.area, line.city, line.ad), 1)
        }
      }

    // Aggregate click counts for identical (day, area, city, ad) keys.
    val resDS: DStream[((String, String, String, String), Int)] =
      mapDS.reduceByKey(_ + _)

    resDS.foreachRDD { rdd =>
      // Use foreachPartition so the DAO (and its underlying DB connection)
      // is created once per partition, not once per record — creating a
      // connection per record is a well-known Spark performance pitfall.
      rdd.foreachPartition { iter =>
        val areaAdDao = new AreaAdDao()
        iter.foreach {
          case ((day, area, city, adId), count) =>
            areaAdDao.insertAreaCityAdCount(day, area, city, adId, count)
        }
      }
    }
  }

}
