package com.galeno.test

import com.galeno.utils.SparkUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel

/**
 * Demo of RDD caching/persistence: aggregates order amounts by key,
 * persists the aggregated RDD, and prints the top entries by amount.
 *
 * @author galeno
 * @date 2021/9/122:48
 */
object RDDCatch {
  /**
   * Entry point: builds a small order RDD, sums money per (id, orderType) key,
   * persists the aggregate, prints it, then prints the top 3 by amount and the
   * overall total.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc

    // Sample data: (Order(id, name, money, orderType), index).
    val rdd1 = sc.makeRDD(Seq(
      (Order(1, "柳坤", 200, "团购订单"), 1),
      (Order(2, "兴国", 300, "普通订单"), 2),
      (Order(3, "柳坤", 400, "团购订单"), 3),
      (Order(4, "广磊", 500, "普通订单"), 4),
      (Order(5, "兴国", 600, "普通订单"), 5)
    ))

    // Key by (id, orderType) and sum money per key.
    // NOTE(review): id is unique per order, so each key has exactly one value
    // and the reduce is effectively a no-op; keying by (name, orderType) may
    // have been intended — confirm.
    val rdd2: RDD[((Int, String), Int)] = rdd1.map(x => ((x._1.id, x._1.orderType), x._1.money))
    val rdd3 = rdd2.reduceByKey(_ + _)

    // Persist BEFORE the first action so subsequent jobs reuse the result.
    // Bug fix: the original called cache() and then persist(MEMORY_AND_DISK),
    // which throws UnsupportedOperationException — an RDD's storage level
    // cannot be changed once assigned. A single persist() is used instead,
    // and it is requested before any action so it actually takes effect.
    rdd3.persist(StorageLevel.MEMORY_AND_DISK)

    rdd3.foreach(println)

    // Top 3 entries by aggregated amount, descending.
    // Bug fix: the original computed takeOrdered(3).map(...) and discarded
    // the result; an explicit descending Ordering replaces the negate/
    // re-negate workaround.
    val top3 = rdd3.takeOrdered(3)(Ordering.by { case (_, money) => -money })
    top3.foreach(println)

    // Total amount across all keys (implements the original "total amount" TODO).
    val totalMoney = rdd3.map(_._2).sum()
    println(s"total money: $totalMoney")

    // Release cluster resources deterministically.
    sc.stop()
  }
}
/**
 * Immutable order record.
 *
 * Marked `final` per Scala convention: case classes should not be extended.
 *
 * @param id        unique order id
 * @param name      customer name
 * @param money     order amount
 * @param orderType order category label
 */
final case class Order(id: Int, name: String, money: Int, orderType: String)
