import org.apache.spark.{SparkConf, SparkContext}

object scala4251 {
  /** Demonstrates `groupByKey` on a small pair RDD: prints the values grouped
    * per key, then the number of elements in each group.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SparkGroupByKeyExample").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val rdd_1 = sc.parallelize(List(('a', 1), ('a', 2), ('b', 1), ('c', 1), ('c', 1)))
      // NOTE: groupByKey shuffles every value for a key to a single node;
      // acceptable for this tiny demo, but prefer reduceByKey/aggregateByKey
      // for real workloads.
      val g_rdd = rdd_1.groupByKey()
      val groupedResult = g_rdd.collect()
      println("分组结果:")
      groupedResult.foreach(println)
      // mapValues keeps keys untouched (and preserves the partitioner),
      // avoiding the hand-built tuple of map(x => (x._1, x._2.size)).
      val countResult = g_rdd.mapValues(_.size).collect()
      println("每组元素个数:")
      countResult.foreach(println)
    } finally {
      // Guarantee the SparkContext is released even if a job above fails.
      sc.stop()
    }
  }
}