import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demonstrates `groupByKey` on a pair RDD: groups values per key, prints the
 * grouped values, then derives a per-key count from the grouped result.
 *
 * Note: for a pure per-key count, `reduceByKey(_ + _)` over `(k, 1)` pairs is
 * preferred in production since it combines map-side and avoids shuffling all
 * values; `groupByKey` is kept here because printing the grouped values is the
 * point of the example.
 */
object GroupByKeyRDD {
  def main(args: Array[String]): Unit = {
    // App name matches this object (was "SubtractRDD", a copy-paste leftover).
    val conf = new SparkConf().setAppName("GroupByKeyRDD").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val pairs = sc.parallelize(List(('a', 1), ('a', 2), ('b', 1), ('c', 1), ('c', 1)))

      // Shuffle all values for each key into a single Iterable per key.
      val grouped = pairs.groupByKey()

      grouped.collect.foreach {
        case (k, vs) => println(s"(${k}, ${vs.mkString(",")})")
      }

      // mapValues keeps the key untouched and preserves the partitioner
      // established by groupByKey (plain `map` would discard it).
      val counts = grouped.mapValues(_.size)

      counts.collect.foreach {
        case (k, cnt) => println(s"(${k}, ${cnt})")
      }
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }

}
