package com.xiaohu.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the `foreach` action operator and the lazy nature of RDDs:
 * transformations (`map`) build a lineage but execute nothing; each call to
 * an action (`foreach`) triggers one full job over that lineage.
 */
object Demo13foreach {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      // Fixed: the app name previously said "MapValues operator demo",
      // copy-pasted from another file; this demo is about `foreach`.
      .setAppName("foreach operator demo")

    val sc: SparkContext = new SparkContext(conf)

    val linesRDD: RDD[String] = sc.textFile("spark/data/students.txt")

    val rdd1: RDD[Array[String]] = linesRDD.map((e: String) => {
      e.split(",")
    })

    // `collect` with a partial function keeps only rows that split into
    // exactly 5 fields. The previous `map { case Array(...) => ... }` was
    // non-exhaustive: one malformed line would abort the job with MatchError.
    val rdd2: RDD[(String, String, String, String, String)] = rdd1.collect {
      case Array(id, name, age, gender, clazz) => (id, name, age, gender, clazz)
    }

    /**
     * An action operator triggers one job execution; as many actions are
     * called, that many jobs run.
     *
     * RDDs are lazily evaluated.
     */
    //    rdd2.foreach(println)
    //    println("====================================")
    //    rdd2.foreach(println)


    // Always printed — this runs on the driver and is not part of a Spark job.
    println("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")
    val rdd3: RDD[(String, String, String, String, String)] = rdd2.map((t5: (String, String, String, String, String)) => {
      // Printed once per record, but only when an action runs the job —
      // i.e. after the "###" line below, not before it.
      println("===============================")
      t5
    })
    println("#############################")

    rdd3.foreach(println)

    // Keep the driver JVM alive so the Spark web UI (localhost:4040) can be
    // inspected. Sleeping replaces the previous `while (true) {}` busy-loop,
    // which pinned a CPU core at 100% for no benefit.
    Thread.sleep(Long.MaxValue)
  }
}
