package com.shujia.opt

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo3coalesce {
  /**
   * Demonstrates shrinking an RDD's partition count with `coalesce`.
   *
   * Reads a directory of many small files (previously generated via
   * `repartition(100)`, see the commented-out snippet below), merges the
   * partitions down to 2 without a shuffle, then counts students per class
   * (class name is field index 4 of each comma-separated line).
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    // Execution mode: "local" runs driver and executor in-process (for development).
    conf.setMaster("local")
    // Spark application name (shown in the web UI).
    conf.setAppName("wc")
    // Create the Spark context — the entry point for RDD operations.
    val sc = new SparkContext(conf)

    // One-time generation of the small-file input: blow students.csv up to 100 files.
    /* val studentsRDD: RDD[String] = sc.textFile("spark/data/students.csv")
     val repartitionRDD: RDD[String] = studentsRDD.repartition(100)
     repartitionRDD.saveAsTextFile("spark/data/students")*/

    // Read the small files back; textFile creates roughly one partition per file.
    val studentRDD: RDD[String] = sc.textFile("spark/data/students")
    println(s"studentRDD分区数：${studentRDD.getNumPartitions}")

    // Merge partitions down to 2. shuffle = false combines partitions on the
    // same executor without moving data over the network (coalesce can only
    // reduce the partition count in this mode).
    val conRDD: RDD[String] = studentRDD.coalesce(2, shuffle = false)

    // Count students per class (column 4 holds the class name).
    conRDD
      .map(line => (line.split(",")(4), 1))
      .reduceByKey((x, y) => x + y)
      .foreach(println)

    // Keep the driver alive so the Spark web UI (default http://localhost:4040)
    // stays inspectable. The original `while (true) {}` busy-wait pinned a CPU
    // core at 100%; sleeping blocks the thread without burning cycles.
    Thread.sleep(Long.MaxValue)

  }
}
