package com.ecommerce.analysis

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

import scala.util.control.NonFatal

/**
 * Analyzes the distribution of total sales revenue across countries.
 *
 * For each `Country` in the cleaned transaction data, computes:
 *  - `country_revenue`:  sum of `TotalAmount`
 *  - `order_count`:      distinct `InvoiceNo` count
 *  - `customer_count`:   distinct `CustomerID` count
 *  - `revenue_ratio`:    the country's share of total revenue, in percent
 *
 * Results are ordered by `country_revenue` descending.
 */
class CountrySalesDistributionAnalyzer extends BaseAnalyzer {

  /**
   * Runs the per-country sales distribution aggregation.
   *
   * @param spark       active session (unused directly; kept for the BaseAnalyzer contract)
   * @param cleanedData cleaned transactions; expected columns: Country, TotalAmount,
   *                    InvoiceNo, CustomerID — TODO confirm schema against the cleaning step
   * @return aggregated DataFrame (cached), one row per country
   */
  override def analyze(spark: SparkSession, cleanedData: DataFrame): DataFrame = {
    logger.info("===== 开始执行各国总销售额分布分析 =====")
    try {
      // sum() over an empty DataFrame yields SQL NULL, so getDouble(0) would NPE.
      // Extract defensively and convert via toString so a Decimal/Long sum also works.
      val totalRevenue = Option(cleanedData.agg(sum("TotalAmount")).head().get(0))
        .map(_.toString.toDouble)
        .getOrElse(0.0)

      // Guard the ratio denominator: with zero total revenue every country's revenue
      // is also zero, so dividing by 1.0 correctly yields 0% instead of NaN.
      val ratioDenominator = if (totalRevenue == 0.0) 1.0 else totalRevenue

      val result = cleanedData
        .groupBy("Country")
        .agg(
          sum("TotalAmount").alias("country_revenue"),
          countDistinct("InvoiceNo").alias("order_count"),
          countDistinct("CustomerID").alias("customer_count")
        )
        .withColumn("revenue_ratio",
          col("country_revenue") / lit(ratioDenominator) * 100
        )
        .orderBy(col("country_revenue").desc)
        // Cache: the plan is consumed three times below (count, show, caller);
        // without caching the whole aggregation would be recomputed each time.
        .cache()

      logger.info(s"各国总销售额分布分析完成，结果共 ${result.count()} 条记录")
      result.show(5, truncate = false)
      result
    } catch {
      // NonFatal: log and rethrow recoverable failures, but let fatal JVM
      // errors (OutOfMemoryError, InterruptedException, ...) propagate untouched.
      case NonFatal(e) =>
        logger.error("各国总销售额分布分析失败", e)
        throw e
    }
  }

  /**
   * Ensures the target table exists (keyed/partitioned on Country — semantics
   * depend on BaseAnalyzer.createTableIfNotExists) before delegating the save.
   */
  override def performSave(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    createTableIfNotExists(result, tableName, "Country")
    super.performSave(result, tableName, spark)
  }
}