package com.ecommerce.analysis

import scala.util.control.NonFatal

import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, SparkSession}

import com.ecommerce.db.MySQLHandler

/**
 * Analyzer that computes the 10 best-selling products (by total units sold)
 * from the cleaned transaction data and persists the result via the
 * [[BaseAnalyzer]] save pipeline.
 */
class Top10ProductsAnalyzer extends BaseAnalyzer {

  /**
   * Aggregates the cleaned data per product (`StockCode`, `Description`) into
   * total quantity, total revenue and distinct-order count, then keeps the
   * 10 products with the highest total quantity.
   *
   * @param spark       active session (unused directly; kept for the base contract)
   * @param cleanedData cleaned transactions — assumed to contain columns
   *                    StockCode, Description, Quantity, TotalAmount, InvoiceNo
   * @return a cached 10-row DataFrame, sorted by total_quantity descending
   * @throws Exception rethrown after logging if the aggregation fails
   */
  override def analyze(spark: SparkSession, cleanedData: DataFrame): DataFrame = {
    logger.info("===== 开始执行销量最高的10个商品分析 =====")
    try {
      val result = cleanedData
        .groupBy("StockCode", "Description")
        .agg(
          sum("Quantity").alias("total_quantity"),
          sum("TotalAmount").alias("total_revenue"),
          countDistinct("InvoiceNo").alias("order_count")
        )
        .orderBy(col("total_quantity").desc)
        .limit(10)
        // Cache the (at most 10-row) result: count(), show() and the later
        // performSave would otherwise each re-execute the full groupBy/sort
        // lineage over the source data.
        .cache()

      logger.info(s"销量最高的10个商品分析完成，结果共 ${result.count()} 条记录")
      result.show(5, truncate = false)
      result
    } catch {
      // NonFatal: log and rethrow recoverable failures only; let fatal
      // throwables (OOM, InterruptedException, ...) propagate untouched.
      case NonFatal(e) =>
        logger.error("销量最高的10个商品分析失败", e)
        throw e
    }
  }

  /**
   * Overrides the base save to first create the target table (primary key
   * `StockCode`) if it does not exist, then delegates the actual write to
   * the parent implementation.
   */
  override def performSave(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    createTableIfNotExists(result, tableName, "StockCode")
    super.performSave(result, tableName, spark)
  }
}