package com.doit.spark.day06

import com.doit.spark.day01.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * @DATE 2022/1/9/20:34
 * @Author MDK
 * @Version 2021.2.2
 *
 * shop1,2022-01-01,500
 * shop1,2022-01-02,500
 * shop1,2022-02-01,500
 * shop1,2022-04-01,500
 * shop1,2022-03-01,500
 * shop1,2022-06-01,500
 *
 * 店铺的月销售总额
 * name    月     月额     累计
 * shop1 2022-01  1000    1000
 * shop1 2022-02  500     1500
 * shop1 2022-03  500     2000
 * */
object Demo02 {
  /**
   * Computes, for every shop, the monthly sales total together with a
   * running cumulative total over the (chronologically sorted) months.
   *
   * Input lines look like `shop1,2022-01-01,500`; each printed record is a
   * tuple `(shopName, month "yyyy-MM", monthlyTotal, cumulativeTotal)`.
   */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    val shopRDD: RDD[String] = sc.textFile("data/shop/", 1)

    // Parse each line and pre-aggregate by (shop, month) with reduceByKey:
    // this sums on the map side before the shuffle, instead of moving every
    // raw record across the network as the former groupBy-on-rows did.
    val monthlyTotals: RDD[((String, String), Int)] = shopRDD
      .map(line => {
        val fields = line.split(",")
        // Month key is the "yyyy-MM" prefix of the date column.
        ((fields(0), fields(1).substring(0, 7)), fields(2).toInt)
      })
      .reduceByKey(_ + _)

    // For each shop: sort its months (the "yyyy-MM" format orders correctly
    // as a plain string) and fold a running total with scanLeft. The seed
    // element produced by scanLeft is dropped via .tail.
    monthlyTotals
      .groupBy { case ((shop, _), _) => shop }
      .flatMap { case (shop, rows) =>
        val sortedByMonth = rows.toList.sortBy { case ((_, month), _) => month }
        sortedByMonth
          .scanLeft((shop, "", 0, 0)) {
            case ((_, _, _, runningTotal), ((name, month), monthSum)) =>
              (name, month, monthSum, runningTotal + monthSum)
          }
          .tail // drop the scanLeft seed
      }
      .foreach(println)
  }
}
