package org.apache.spark.examples

import org.apache.spark.{SparkConf, SparkContext}
import scala.util.Random

/**
 * Demonstrates RDD `groupByKey` usage end to end: basic grouping, post-group
 * aggregation, grouping of complex (case-class) values, large-scale grouping,
 * custom partition counts, transformation, filtering, and sorting of grouped
 * data. Runs in local mode with 4 threads and pauses at the end so the
 * Spark UI can be inspected.
 *
 * Note: `groupByKey` shuffles all values; for pure aggregations (sum, count,
 * avg) `reduceByKey`/`aggregateByKey` would be cheaper — it is used here
 * deliberately because the example is about `groupByKey` itself.
 */
object RDDGroupByKeyOperations {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("RDD GroupByKey Operations")
      .setMaster("local[4]")
    val sc = new SparkContext(conf)

    // 1. Basic groupByKey example: (product, price) pairs grouped by product.
    val salesRDD = sc.parallelize(List(
      ("apple", 5.0), ("banana", 3.0), ("apple", 4.0),
      ("orange", 2.5), ("banana", 4.0), ("apple", 6.0)
    ))
    println("\n=== 基础groupByKey示例 ===")
    println("原始销售数据：")
    salesRDD.collect().foreach(println)

    // Cache the grouped RDD: it is consumed by four separate actions below
    // (stats, price ranges, filtering, sorting); without cache() the shuffle
    // would be recomputed for each one.
    val groupedSales = salesRDD.groupByKey().cache()
    println("\n按产品分组后的销售数据：")
    groupedSales.collect().foreach { case (product, prices) =>
      println(s"$product: ${prices.mkString(", ")}")
    }

    // 2. Aggregations over each group: total / count / avg / max / min.
    println("\n=== 分组后的聚合计算 ===")
    val salesStats = groupedSales.mapValues { prices =>
      // Materialize once so the iterable is not traversed five times.
      val priceList = prices.toList
      val total = priceList.sum
      val count = priceList.size
      val avg = total / count // groups are never empty, so count > 0
      val max = priceList.max
      val min = priceList.min
      (total, count, avg, max, min)
    }

    println("每种产品的销售统计：")
    salesStats.collect().foreach { case (product, (total, count, avg, max, min)) =>
      println(f"$product:")
      println(f"  总销售额: $total%.2f")
      println(f"  销售次数: $count")
      println(f"  平均价格: $avg%.2f")
      println(f"  最高价格: $max%.2f")
      println(f"  最低价格: $min%.2f")
    }

    // 3. Grouping complex values (a case class per sale record).
    case class SaleRecord(price: Double, quantity: Int, date: String)
    val complexSalesRDD = sc.parallelize(List(
      ("apple", SaleRecord(5.0, 10, "2024-02-01")),
      ("banana", SaleRecord(3.0, 8, "2024-02-01")),
      ("apple", SaleRecord(4.0, 12, "2024-02-02")),
      ("orange", SaleRecord(2.5, 15, "2024-02-02")),
      ("banana", SaleRecord(4.0, 5, "2024-02-03")),
      ("apple", SaleRecord(6.0, 9, "2024-02-03"))
    ))

    println("\n=== 处理复杂值 ===")
    val groupedComplexSales = complexSalesRDD.groupByKey()
    println("按产品分组的详细销售记录：")
    groupedComplexSales.collect().foreach { case (product, records) =>
      println(s"\n$product:")
      records.foreach(r =>
        println(f"  日期: ${r.date} 价格: ${r.price}%.2f 数量: ${r.quantity}")
      )
    }

    // 4. Large-scale grouping performance test.
    // Data is generated with mapPartitionsWithIndex so each partition owns a
    // deterministic RNG seeded with (42 + partition index). Capturing a single
    // `new Random(42)` in a plain `map` closure would serialize the identical
    // seeded state to every task, making all partitions emit the exact same
    // product/price sequence instead of independent random data.
    println("\n=== 大数据量分组性能测试 ===")
    val products = Array("apple", "banana", "orange", "grape", "watermelon")
    val largeSalesRDD = sc.parallelize(1 to 1000000)
      .mapPartitionsWithIndex { (partIdx, iter) =>
        val rng = new Random(42L + partIdx)
        iter.map { _ =>
          val product = products(rng.nextInt(products.length))
          val price = 2.0 + rng.nextDouble() * 8.0
          (product, price)
        }
      }

    println("大数据量分组前的分区数：" + largeSalesRDD.getNumPartitions)
    val groupedLargeSales = largeSalesRDD.groupByKey()
    println("分组后的统计信息：")
    groupedLargeSales.mapValues { prices =>
      val priceList = prices.toList
      (priceList.size, priceList.sum / priceList.size)
    }.collect().foreach { case (product, (count, avg)) =>
      println(f"$product: 销售次数=$count, 平均价格=$avg%.2f")
    }

    // 5. groupByKey with an explicit partition count.
    println("\n=== 自定义分区数的groupByKey ===")
    val customPartitionedSales = salesRDD.groupByKey(2)
    println(s"自定义分区后的分区数: ${customPartitionedSales.getNumPartitions}")

    // 6. Transforming grouped data.
    println("\n=== 分组后的数据转换 ===")

    // 6.1 Split each group's sorted prices into up to 3 roughly equal ranges.
    val priceRanges = groupedSales.mapValues { prices =>
      val sortedPrices = prices.toList.sorted
      // Ceiling division by 3 so the segments cover all elements.
      val segments = sortedPrices.grouped((sortedPrices.size + 2) / 3).toList
      segments.zipWithIndex.map { case (seg, idx) =>
        f"区间${idx + 1}: ${seg.mkString(", ")}"
      }
    }

    println("价格区间分析：")
    priceRanges.collect().foreach { case (product, ranges) =>
      println(s"\n$product:")
      ranges.foreach(println)
    }

    // 7. Filtering grouped data.
    println("\n=== 分组数据的过滤 ===")

    // 7.1 Keep only products that sold more than twice.
    val frequentSales = groupedSales.filter { case (_, prices) =>
      prices.size > 2
    }
    println("销售次数大于2的产品：")
    frequentSales.collect().foreach { case (product, prices) =>
      println(s"$product: ${prices.size}次销售")
    }

    // 8. Sorting grouped data.
    println("\n=== 分组后的排序 ===")

    // 8.1 Rank products by total sales, descending.
    val sortedByTotal = groupedSales.mapValues(_.sum)
      .sortBy(_._2, ascending = false)

    println("按销售总额排序：")
    sortedByTotal.collect().foreach { case (product, total) =>
      println(f"$product: 总销售额=$total%.2f")
    }

    // Deliberate pause (5 minutes) so the Spark UI stays available for
    // inspection before the context shuts down.
    println("\n程序将暂停5分钟，请在此期间查看Spark UI...")
    Thread.sleep(300000)

    sc.stop()
  }
}