package cn.doitedu.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of grouping an RDD of (province, city, amount) triples by province
 * using `RDD.groupBy`, then writing the grouped result to a text output dir.
 *
 * Run locally with 4 cores; writes to `out/out23` (fails if the dir exists,
 * per Hadoop output semantics).
 */
object T17_GroupByDemo {

  def main(args: Array[String]): Unit = {

    // 1. Build the SparkConf.
    //    App name now matches this demo (was a copy-paste leftover
    //    "MapPartitionsWithIndexDemo").
    val conf = new SparkConf().setAppName("GroupByDemo")
      .setMaster("local[4]")

    // 2. Create the SparkContext (driver-side entry point).
    val sc = new SparkContext(conf)

    // Sample data: (province, city, amount).
    val list = List(
      ("山东省", "济南市", 1000),  ("辽宁省", "沈阳市", 1000),  ("山东省", "烟台市", 2000),
      ("辽宁省", "本溪市", 1000), ("山东省", "济南市", 1000), ("河北省", "廊坊市", 2000),
      ("辽宁省", "沈阳市", 1000), ("山东省", "济南市", 1000), ("河北省", "廊坊市", 2000),
    )

    val rdd1: RDD[(String, String, Int)] = sc.parallelize(list)

    // Group whole triples by province (the first tuple element).
    // NOTE: groupBy keeps the full triple in each group's values; the
    // commented alternative below pre-projects to (province, (city, amount))
    // and uses groupByKey, which shuffles less data per record.
    val grouped: RDD[(String, Iterable[(String, String, Int)])] = rdd1.groupBy(_._1)
    //val rdd2: RDD[(String, (String, Int))] = rdd1.map(t => (t._1, (t._2, t._3)))
    //val grouped = rdd2.groupByKey()

    // Trigger the job and persist the grouped output as text.
    grouped.saveAsTextFile("out/out23")

    // Release driver/executor resources; was missing in the original,
    // leaving the SparkContext (and its UI/threads) running.
    sc.stop()
  }

}
