package com.lizelinteam.main3

import org.apache.spark.sql.{SparkSession, functions}
import com.lizelinteam.utils.PathConfig

object work32 {
  /** Entry point: reads the input CSV, counts how many records fall on each
    * calendar date (taken from the `time` column), and writes the per-day
    * totals — ordered by date — to the configured output path as CSV.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Daily Statistics")
      .getOrCreate()

    import spark.implicits._

    // Load the source CSV; first row is the header, column types are inferred.
    val records = spark.read
      .option("header", "true")
      .option("inferSchema", "true")
      .csv(PathConfig.inputPath)

    // Derive the date as everything before the first space in `time`.
    // NOTE(review): assumes `time` looks like "yyyy-MM-dd HH:mm:ss" — confirm
    // against the actual input data.
    val withDate = records.withColumn("date", functions.split($"time", " ").getItem(0))

    // Tally rows per date, sorted chronologically for readable output.
    val perDayCounts = withDate
      .groupBy("date")
      .count()
      .orderBy("date")

    // Persist as a headered CSV, replacing any previous run's output.
    perDayCounts.write
      .mode("overwrite")
      .option("header", "true")
      .csv(PathConfig.output32Path)

    spark.stop()
  }
}