package com.shujia.spark.core

import com.shujia.spark.util.HDFSUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo20Submit {

  /**
    * Counts the number of students per class from an HDFS input file and
    * writes the per-class counts back to HDFS.
    *
    * Input:  /data/students.txt — comma-separated lines where column index 4
    *         holds the class name (assumed from the split below — confirm
    *         against the actual data file).
    * Output: /data/clazz_num — one line per class, formatted "class\tcount".
    *
    * Package the project and submit it to the cluster with:
    * {{{
    * spark-submit --class com.shujia.spark.core.Demo20Submit --master yarn-client spark-1.0.jar
    * }}}
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()

    conf.setAppName("submit")

    // When submitting to a cluster the master must NOT be hard-coded here;
    // it is supplied via --master on the spark-submit command line.
    //conf.setMaster("local")

    val sc = new SparkContext(conf)

    try {
      // Read the raw student records from HDFS.
      val studentRDD: RDD[String] = sc.textFile("/data/students.txt")

      // Extract the class name (5th comma-separated column) paired with a count of 1.
      val kvRDD: RDD[(String, Int)] = studentRDD.map { line =>
        val split: Array[String] = line.split(",")
        (split(4), 1)
      }

      // Sum the 1s per class to obtain the student count for each class.
      val classNumRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      // Format each (class, count) pair as a tab-separated output line.
      val resultRDD: RDD[String] = classNumRDD.map {
        case (clazz, num) => s"$clazz\t$num"
      }

      // saveAsTextFile fails if the output path already exists, so delete it first.
      HDFSUtil.deletePath("/data/clazz_num")
      // Persist the result to HDFS.
      resultRDD.saveAsTextFile("/data/clazz_num")
    } finally {
      // Always release cluster resources, even if the job throws.
      sc.stop()
    }
  }

}
