package com.gome.han.bigdata.spark.core.rdd.partitionAndparallel

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author Hanpeng
 * @date 2021/1/12 21:21
 * @description:
 *  Demonstrates that records flow through chained `map` stages per-partition
 *  and in parallel: the interleaved `println` output is intentionally
 *  out of order, since each partition's tasks run concurrently.
 */
object CreateRDDParellFileHow4 {
  def main(args: Array[String]): Unit = {
    // TODO set up the Spark environment (local mode, all cores)
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("operationMap")
    val sc: SparkContext = new SparkContext(conf)

    // TODO build an RDD from an in-memory sequence
    val data: Seq[Int] = Seq(1, 2, 3, 4)
    val source: RDD[Int] = sc.makeRDD(data)

    // First map stage: log each element as it passes through, then forward it unchanged.
    val firstPass: RDD[Int] = source.map { n =>
      println(">>>>>>> " + n)
      n
    }

    // Second map stage: log again — interleaving with the first stage's output
    // shows elements are processed one-at-a-time per partition, not stage-by-stage.
    val secondPass: RDD[Int] = firstPass.map { n =>
      println("######## " + n)
      n
    }

    // Trigger execution; transformations above are lazy until an action runs.
    secondPass.collect()

    // TODO tear down the Spark environment
    sc.stop()
  }
}
