package com.shujia.spark

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
  * Demo: word-count-style aggregation with reduceByKey.
  *
  * Reads student records (CSV lines) and counts how many students are in
  * each class, using reduceByKey's map-side pre-aggregation.
  */
object Demo9ReduceBykey {
  def main(args: Array[String]): Unit = {

    // Run locally in-process; the app name appears in the Spark UI.
    val conf: SparkConf = new SparkConf()
      .setAppName("make")
      .setMaster("local")

    val sc = new SparkContext(conf)

    try {
      // One element per line of the input file.
      val studentsRDD: RDD[String] = sc.textFile("spark/data/students.txt")

      // Key each student by class (5th comma-separated field), value 1.
      // NOTE(review): assumes every line has at least 5 fields — a short or
      // blank line would throw ArrayIndexOutOfBoundsException; confirm input.
      val kvRDD: RDD[(String, Int)] = studentsRDD.map { student =>
        val clazz: String = student.split(",")(4)
        (clazz, 1)
      }

      /**
        * reduceByKey aggregates the values of each key.
        *
        * Unlike groupByKey, it pre-aggregates on the map side (combiner),
        * so far less data crosses the shuffle.
        */
      val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      // Action: triggers the job and prints (class, count) pairs.
      countRDD.foreach(println)
    } finally {
      // Always stop the context so the local Spark runtime shuts down cleanly.
      sc.stop()
    }
  }

}
