package com.doit.day01

import com.doit.beans.OrdersBean
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
object TestOrders {

  /**
   * Sums order revenue per city for a single category.
   *
   * Reads CSV order lines from a directory, keeps only the requested
   * category, and prints `(city, totalMoney)` pairs.
   *
   * Usage: TestOrders [inputPath] [category]
   *   - inputPath: directory of CSV order lines (default: "data/orders/")
   *   - category : category code to aggregate   (default: "A")
   *
   * Running with no arguments reproduces the original hard-coded behavior.
   */
  def main(args: Array[String]): Unit = {

    // Optional CLI overrides; defaults keep the original behavior intact.
    val inputPath = args.lift(0).getOrElse("data/orders/")
    val category  = args.lift(1).getOrElse("A")

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    try {
      val data = sc.textFile(inputPath)

      // Parse each CSV line into a bean. Field order assumed to match the
      // OrdersBean constructor (id, money, city, category?) — TODO confirm
      // against the OrdersBean definition; a short or non-numeric line will
      // fail the task, matching the original behavior.
      val beans: RDD[OrdersBean] = data.map { line =>
        val arr = line.split(",")
        OrdersBean(arr(0), arr(1).toDouble, arr(2), arr(3))
      }

      val filtered: RDD[OrdersBean] = beans.filter(_.category == category)
      val cityMoney: RDD[(String, Double)] = filtered.map(bean => (bean.city, bean.money))
      val res = cityMoney.reduceByKey(_ + _)

      // NOTE: foreach(println) prints on executors; with master local[*]
      // everything runs in-process, so output appears on this console.
      res.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails mid-way.
      sc.stop()
    }
  }
}
