package com.ecommerce.analysis

import scala.util.control.NonFatal

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

/**
 * Analyzer exploring the relationship between a product's unit price and its
 * sales volume / revenue. Products are bucketed into price tiers and the
 * result is sorted by ascending unit price.
 */
class PriceSalesRelationAnalyzer extends BaseAnalyzer {

  /** Rows with UnitPrice at or above this value are dropped as price outliers. Overridable. */
  protected val maxUnitPrice: Double = 3000
  /** Inclusive upper bound of the "低价" (low price) tier. Overridable. */
  protected val lowPriceThreshold: Double = 5
  /** Inclusive upper bound of the "中价" (mid price) tier; anything above is "高价". Overridable. */
  protected val midPriceThreshold: Double = 20

  /**
   * Aggregates quantity and revenue per (StockCode, Description, UnitPrice),
   * tags each row with a price tier, and returns the result ordered by price.
   *
   * @param spark       active session (unused directly; kept for the BaseAnalyzer contract)
   * @param cleanedData input with at least StockCode, Description, UnitPrice,
   *                    Quantity and TotalAmount columns — assumed pre-cleaned; TODO confirm schema
   * @return aggregated DataFrame with columns StockCode, Description, UnitPrice,
   *         price_level, total_quantity, total_revenue
   */
  override def analyze(spark: SparkSession, cleanedData: DataFrame): DataFrame = {
    logger.info("===== 开始执行商品单价与销量关系分析 =====")
    try {
      val result = cleanedData
        // Keep only rows priced below the outlier cutoff.
        .filter(col("UnitPrice") < maxUnitPrice)
        .groupBy("StockCode", "Description", "UnitPrice")
        .agg(
          sum("Quantity").alias("total_quantity"),
          sum("TotalAmount").alias("total_revenue")
        )
        // Bucket each product into a tier: 低价 / 中价 / 高价 (low / mid / high).
        .withColumn("price_level",
          when(col("UnitPrice") <= lowPriceThreshold, "低价")
            .when(col("UnitPrice") <= midPriceThreshold, "中价")
            .otherwise("高价")
        )
        .select(
          "StockCode", "Description", "UnitPrice", "price_level",
          "total_quantity", "total_revenue"
        )
        .orderBy(col("UnitPrice").asc)

      // NOTE(review): count() and show() each launch a separate Spark job, so the
      // plan is evaluated twice here; consider caching `result` if this is hot.
      logger.info(s"单价与销量关系分析完成，结果共 ${result.count()} 条记录")
      result.show(5, truncate = false)
      result
    } catch {
      // NonFatal instead of Exception: lets truly fatal errors (OutOfMemoryError,
      // InterruptedException, ...) propagate untouched instead of being logged here.
      case NonFatal(e) =>
        logger.error("单价与销量关系分析失败", e)
        throw e
    }
  }

  /**
   * Ensures the target table exists (keyed on StockCode) before delegating the
   * actual write to the base implementation.
   */
  override def performSave(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    createTableIfNotExists(result, tableName, "StockCode")
    super.performSave(result, tableName, spark)
  }
}