package com.analysis

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object SalesTrend {
  /** Weekly sales-trend batch job.
   *
   *  Reads cleaned transaction data (parquet), aggregates revenue and order
   *  counts per (year, week), and writes the result as JSON for the front end.
   *
   *  @param args optional overrides:
   *              args(0) — input parquet path  (default: "output/cleaned_data.parquet")
   *              args(1) — output JSON path    (default: "output/analysis_results/sales_trend")
   */
  def main(args: Array[String]): Unit = {
    val inputPath  = args.lift(0).getOrElse("output/cleaned_data.parquet")
    val outputPath = args.lift(1).getOrElse("output/analysis_results/sales_trend")

    val spark = SparkSession.builder()
      .appName("Sales Trend Analysis")
      .master("local[*]")
      // LEGACY keeps pre-Spark-3 date/time parsing semantics for old formats
      .config("spark.sql.legacy.timeParserPolicy", "LEGACY")
      .getOrCreate()

    try {
      val cleanedDF = spark.read.parquet(inputPath)

      // Group by (Year, Week), not Week alone: week numbers repeat every year,
      // so grouping by week only would merge e.g. week 5 of 2010 with week 5
      // of 2011 whenever the data spans a year boundary.
      val trendDF = cleanedDF
        .withColumn("Year", year(col("InvoiceDate")))
        .withColumn("Week", weekofyear(col("InvoiceDate")))
        .groupBy("Year", "Week")
        .agg(
          sum("TotalPrice").as("WeeklyRevenue"),
          countDistinct("InvoiceNo").as("OrderCount")
        )
        // Average order value = revenue / distinct orders. The previous
        // avg("TotalPrice") averaged individual line-item rows, which
        // understates the per-order value whenever orders have multiple lines.
        .withColumn("AvgOrderValue", col("WeeklyRevenue") / col("OrderCount"))
        .orderBy("Year", "Week")

      // JSON output consumed by the front end
      trendDF.write
        .mode("overwrite")
        .json(outputPath)
    } finally {
      // Release the session even if reading/aggregation/writing fails.
      spark.stop()
    }
  }
}