package com.doit.spark.day03

import com.doit.spark.day01.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * Order-data aggregation example: per-category order count, total
 * revenue, and quantity-weighted average price.
 *
 * @author MDK
 * @since 2022-01-05
 * @note built with IDEA 2021.2.2
 */
object C07_订单数据案例 {
  /**
   * Reads order records from `data/orders.txt` (CSV: orderId, category,
   * product, price, quantity) and prints three per-category aggregates:
   * order count, total revenue, and quantity-weighted average price.
   */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    try {
      val rdd = sc.textFile("data/orders.txt")

      // Parse each CSV line into (orderId, category, product, price, quantity).
      // NOTE(review): assumes every line has exactly 5 well-formed fields —
      // a malformed line will fail the job; confirm input is trusted.
      val rdd2: RDD[(String, String, String, Double, Int)] = rdd.map(line => {
        val arr = line.split(",")
        (arr(0), arr(1), arr(2), arr(3).toDouble, arr(4).toInt)
      })

      // Group orders by category (2nd field), e.g.:
      // (生活用品, CompactBuffer((o003,生活用品,香皂,20.0,8), ...))
      val rdd3: RDD[(String, Iterable[(String, String, String, Double, Int)])] =
        rdd2.groupBy(_._2)
      // Three separate actions consume rdd3 below; cache it so the
      // read + parse + shuffle is not recomputed for each one.
      rdd3.cache()

      // 1. Number of orders per category.
      val resRDD: RDD[(String, Int)] = rdd3.map { case (category, orders) =>
        (category, orders.size)
      }
      resRDD.foreach(println)

      println("--------------------------------------------------------------------")

      // 2. Total revenue (price * quantity) per category.
      rdd3.map { case (category, orders) =>
        val total = orders.map(o => o._4 * o._5).sum
        (category, total)
      }.foreach(println)

      println("--------------------------------------------------------------------")

      // 3. Quantity-weighted average unit price per category
      //    (total revenue divided by total quantity sold).
      rdd3.map { case (category, orders) =>
        val revenue = orders.map(o => o._4 * o._5).sum
        val quantity = orders.map(_._5).sum
        (category, revenue / quantity)
      }.foreach(println)
    } finally {
      // Release cluster resources even if one of the jobs fails.
      sc.stop()
    }
  }
}
