package com.ecommerce.analysis

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

import scala.util.control.NonFatal

/**
 * Per-country analysis of normal orders vs. returns (United Kingdom excluded).
 *
 * Produces, for each country: distinct normal/return order counts, normal
 * revenue, return loss, and a return-rate percentage, sorted by rate descending.
 */
class CountryOrderReturnAnalyzer extends BaseAnalyzer {

  /**
   * Runs the analysis over the cleaned transaction data.
   *
   * @param spark       active session (unused directly; kept for the base contract)
   * @param cleanedData cleaned transactions; expected columns include
   *                    InvoiceNo, Country, TotalAmount — TODO confirm schema upstream
   * @return aggregated per-country DataFrame (cached; caller may unpersist)
   */
  override def analyze(spark: SparkSession, cleanedData: DataFrame): DataFrame = {
    logger.info("===== 开始执行各国订单与退货关系分析 =====")
    try {
      val withReturnFlag = cleanedData
        // Exclude United Kingdom rows from this analysis.
        .filter(col("Country") =!= "United Kingdom")
        // Cancellation (return) invoices are prefixed with "C"; match the prefix
        // only — the previous "%C%" pattern would also flag a "C" appearing
        // anywhere inside a regular invoice number.
        .withColumn("is_return", col("InvoiceNo").startsWith("C"))

      val result = withReturnFlag
        .groupBy("Country")
        .agg(
          countDistinct(when(not(col("is_return")), col("InvoiceNo"))).alias("normal_orders"),
          countDistinct(when(col("is_return"), col("InvoiceNo"))).alias("return_orders"),
          sum(when(not(col("is_return")), col("TotalAmount"))).alias("normal_revenue"),
          sum(when(col("is_return"), col("TotalAmount"))).alias("return_loss")
        )
        // Guard the division: countries with zero normal orders get a null rate
        // instead of a divide-by-zero failure under ANSI SQL mode.
        .withColumn(
          "return_rate",
          when(col("normal_orders") > 0, col("return_orders") / col("normal_orders") * 100)
        )
        .orderBy(col("return_rate").desc)
        // The plan is otherwise evaluated three times below: count(), show(),
        // and whatever action the caller triggers on the returned DataFrame.
        .cache()

      logger.info(s"各国订单与退货关系分析完成，结果共 ${result.count()} 条记录")
      result.show(5, truncate = false)
      result
    } catch {
      // NonFatal: log-and-rethrow recoverable failures, but never intercept
      // fatal errors (OutOfMemoryError, InterruptedException, ...).
      case NonFatal(e) =>
        logger.error("各国订单与退货关系分析失败", e)
        throw e
    }
  }

  /** Ensures the target table exists (keyed on Country) before delegating the save. */
  override def performSave(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    createTableIfNotExists(result, tableName, "Country")
    super.performSave(result, tableName, spark)
  }
}