package com.jscloud.spark.scalacount

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object RddFromCollection {

  /**
   * Demonstrates several ways to build an RDD from an in-memory collection
   * and run a simple transformation/action pipeline in local mode.
   */
  def main(args: Array[String]): Unit = {
    // SparkContext is the program entry point for RDD-based Spark jobs.
    val conf: SparkConf = new SparkConf().setAppName("testRdd").setMaster("local[4]")
    val context: SparkContext = new SparkContext(conf)

    // Build an RDD from a local range, pinned to a single partition.
    val numbers: RDD[Int] = context.parallelize(1 to 10, 1)

    // Transformation (lazy — nothing executes until an action is called).
    val scaled: RDD[Int] = numbers.map(_ * 10)

    // Action: print every element (output appears on the worker's stdout;
    // in local[4] mode that is this JVM's console, in arbitrary order).
    scaled.foreach(println)

    // Other construction styles: parallelize over an Array, and makeRDD over a List.
    val words: RDD[String] = context.parallelize(Array("hadoop", "spark", "flink"), 1)
    val smallInts: RDD[Int] = context.makeRDD(List(1, 2, 3, 4))

    context.stop()
  }
}
