package com.ecommerce.analysis
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.util.control.NonFatal

/**
 * Aggregates cleaned retail transactions into per-week sales metrics
 * (quantity, revenue, distinct order count), keyed by a "year-week" label.
 *
 * Expects `cleanedData` to expose the columns `InvoiceDate`, `Quantity`,
 * `TotalAmount` and `InvoiceNo` (see the grouping/agg below).
 */
class WeeklySalesTrendAnalyzer extends BaseAnalyzer {

  /**
   * Runs the weekly-trend aggregation.
   *
   * @param spark       active session (unused directly; kept for the BaseAnalyzer contract)
   * @param cleanedData pre-cleaned transactions
   * @return one row per (year, week) with columns
   *         `year_week`, `weekly_quantity`, `weekly_revenue`, `weekly_orders`,
   *         ordered by year then week
   * @throws Exception rethrows any non-fatal failure after logging it
   */
  override def analyze(spark: SparkSession, cleanedData: DataFrame): DataFrame = {
    logger.info("===== 开始执行周销量趋势分析 =====")
    try {
      val result = cleanedData
        .withColumn("sale_date", to_date(col("InvoiceDate")))
        .withColumn("year", year(col("sale_date")))
        .withColumn("day_of_year", dayofyear(col("sale_date")))
        // Simple 7-day bucketing counted from Jan 1 (weeks 1..53). This is
        // deliberately NOT ISO-8601 weekofyear(): unlike ISO weeks, every
        // bucket stays inside its own calendar year, so `year` and
        // `week_of_year` never disagree around New Year. Don't "fix" this
        // to weekofyear() without revisiting the year column.
        .withColumn("week_of_year", ((col("day_of_year") - 1) / 7).cast("int") + 1)
        // NOTE: week is not zero-padded ("2011-5", not "2011-05"); downstream
        // consumers must not rely on lexicographic ordering of year_week.
        .withColumn("year_week", concat(col("year"), lit("-"), col("week_of_year")))
        .groupBy("year_week", "year", "week_of_year")
        .agg(
          sum("Quantity").alias("weekly_quantity"),
          sum("TotalAmount").alias("weekly_revenue"),
          countDistinct("InvoiceNo").alias("weekly_orders")
        )
        .orderBy(col("year").asc, col("week_of_year").asc)
        .select("year_week", "weekly_quantity", "weekly_revenue", "weekly_orders")
        // count() and show() below, plus the caller's save, would otherwise
        // each recompute the whole lineage; cache so the aggregation runs once.
        .cache()

      logger.info(s"周销量趋势分析完成，结果共 ${result.count()} 条记录")
      result.show(5, truncate = false)
      result
    } catch {
      // NonFatal instead of Exception: lets OutOfMemoryError, InterruptedException
      // and other fatal Throwables propagate without being logged-and-rethrown here.
      case NonFatal(e) =>
        logger.error("周销量趋势分析失败", e)
        throw e
    }
  }

  /**
   * Ensures the target table exists (keyed/partitioned on `year_week`, per
   * `createTableIfNotExists` in BaseAnalyzer) before delegating the actual
   * write to the base implementation.
   */
  override def performSave(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    createTableIfNotExists(result, tableName, "year_week")
    super.performSave(result, tableName, spark)
  }
}