package org.niit.service

import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.dstream.DStream
import org.niit.bean.AdClickData
import org.niit.common.TService

import java.io.{FileWriter, PrintWriter}
import java.text.SimpleDateFormat
import java.util.Date
import scala.collection.mutable.ListBuffer

/**
 * Date:2025/6/5
 * Author：Ys
 * Description:
 */
class TimeCountService extends TService[DStream[AdClickData]]{

  /**
   * Counts ad clicks in 10-second buckets over a sliding 60-second window
   * (window slides every 10 seconds) and writes each batch's result as a
   * JSON array to data/adclick.json for the front-end chart.
   *
   * Output element shape: { "xtime" : "HH:mm:ss", "yval" : "<count>" }
   *
   * @param data stream of parsed ad-click records
   */
  override def dataAnalysis(data:DStream[AdClickData]): Unit = {

    // Floor each millisecond timestamp down to the start of its 10-second
    // bucket (integer division truncates), so the bucket start serves as
    // the aggregation key.
    val reduceDS: DStream[(Long, Int)] = data.map(line => {
      val bucketTs = line.ts.toLong / 10000 * 10000 // floor to 10-second bucket (ms)
      (bucketTs, 1)
    }).reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y,
      Seconds(60), Seconds(10)
    )

    reduceDS.foreachRDD(rdd => {
      // Build the formatter once per batch on the driver: SimpleDateFormat
      // is not thread-safe and is costly to construct per record.
      val sdf = new SimpleDateFormat("HH:mm:ss")

      // Sort buckets ascending by time, collect to the driver, and format
      // each (bucketStart, count) pair as one JSON object.
      val entries = rdd.sortByKey(ascending = true).collect().map {
        case (time, count) =>
          val timeStr = sdf.format(new Date(time))
          s""" { "xtime" : "$timeStr", "yval" : "$count" } """
      }

      // Write the JSON array; close in finally so the file handle is not
      // leaked if println/flush throws.
      val out = new PrintWriter(new FileWriter("data/adclick.json"))
      try {
        out.println("[" + entries.mkString(",") + "]")
        out.flush()
      } finally {
        out.close()
      }
    })
  }

}
