package com.doit.spark.day01

import com.doit.spark.day01.utils.SparkUtil
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

/**
 * @DATE 2022/1/2/22:38
 * @Author MDK
 * @Version 2021.2.2
 * */
object C04_MakeRDD02 {

  /**
   * Creates an RDD from a text file on HDFS, splits each line into words,
   * and prints the partition counts of the source and derived RDDs.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Create an RDD from a file stored in HDFS.
    val sc: SparkContext = SparkUtil.getSc
    try {
      val rdd: RDD[String] = sc.textFile("hdfs://linux01:8020/1.txt")

      // Split each line on runs of whitespace to get individual words.
      val rdd2: RDD[String] = rdd.flatMap(_.split("\\s+"))

      // foreach is an action operator that triggers job execution.
//      rdd2.foreach(println)

      // flatMap is a narrow transformation, so rdd2 keeps the same
      // partition count as the source rdd.
      println(rdd.getNumPartitions)
      println(rdd2.getNumPartitions)
    } finally {
      // Release the SparkContext and its cluster resources even if the job fails.
      sc.stop()
    }
  }
}
