package cn.doitedu.day04

import org.apache.spark.{SparkConf, SparkContext}

object T01_ShopCount {

  /**
   * Computes, per shop, the monthly sales total together with a running
   * (cumulative) total ordered by month.
   *
   * Input:  data/shop.txt, CSV lines of the form `shopId,yyyy-MM-dd,amount`.
   * Output: out/out13, records of (shopId, (month, monthTotal, cumulativeTotal)).
   */
  def main(args: Array[String]): Unit = {

    // 1. Build the SparkConf.
    // NOTE: appName fixed from the copy-pasted "WordCount" to match this job.
    val conf = new SparkConf().setAppName("ShopCount")
      .setMaster("local[4]") // When submitting to a cluster, setMaster must be removed/commented out.

    // 2. Create the SparkContext.
    val sc = new SparkContext(conf)

    try {
      val lines = sc.textFile("data/shop.txt")

      lines.map(line => {
        // Parse one CSV record: shop id, month (first 7 chars of the date,
        // i.e. "yyyy-MM"), and the sale amount.
        val fields = line.split(",")
        val sid = fields(0)
        val dt = fields(1).substring(0, 7)
        val money = fields(2).toDouble
        ((sid, dt), money)
      }).reduceByKey(_ + _) // total sales per (shop, month)
        .map {
          // Re-key by shop id so all of one shop's months land together.
          case ((sid, dt), money) => (sid, (dt, money))
        }.groupByKey()
        .flatMapValues(it => {
          // Sort one shop's months chronologically, then attach a running sum.
          // The mutable accumulator is confined to this single partition-local list.
          val sorted = it.toList.sortBy(_._1)
          var runningTotal = 0.0
          sorted.map { case (month, monthTotal) =>
            runningTotal += monthTotal
            (month, monthTotal, runningTotal)
          }
        }).saveAsTextFile("out/out13")
    } finally {
      // Always release cluster resources, even if the job fails.
      sc.stop()
    }
  }

}
