package com.niit.service

import java.text.SimpleDateFormat

import com.niit.bean.AdClickData
import com.niit.dao.AreaCountDao
import org.apache.spark.streaming.dstream.DStream

/*
   需求：实时统计 每天 各地区 各城市 各广告 的点击总流量，并将其存入 MySQL。

    Key:(day,area,city,ad) value : count
 */
class AreaCountService {

  /**
   * Real-time aggregation: total click count per (day, area, city, ad),
   * persisted to MySQL via [[AreaCountDao]].
   *
   * @param data stream of ad-click events pulled from Kafka
   */
  def dataAnalysis(data: DStream[AdClickData]): Unit = {

    // 1. Extract key/value pairs: Key:(day, area, city, ad), value: 1 click.
    //    mapPartitions so the SimpleDateFormat (allocation-heavy and not
    //    thread-safe) is built once per partition instead of once per record.
    val mapDS: DStream[((String, String, String, String), Int)] = data.mapPartitions { iter =>
      val sdf = new SimpleDateFormat("yyyy-MM-dd")
      iter.map { line =>
        // line.ts is the epoch-millis timestamp (as String) — TODO confirm unit against producer
        val day = sdf.format(new java.util.Date(line.ts.toLong))
        ((day, line.area, line.city, line.ad), 1)
      }
    }

    // 2. Sum the click counts for identical keys.
    val resDS: DStream[((String, String, String, String), Int)] = mapDS.reduceByKey(_ + _)

    // 3. Persist to the database.
    //    foreachPartition so ONE DAO (i.e. one connection) serves the whole
    //    partition, instead of constructing a DAO for every single record.
    resDS.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        val areaCountDao = new AreaCountDao
        iter.foreach {
          case ((day, area, city, adid), count) =>
            println(s"${day},${area},${city},${adid},${count}")
            areaCountDao.inserAreaCount(day, area, city, adid, count)
        }
      }
    }

  }

}
