package com.atguigu.bigdata.spark

import org.apache.spark.{SparkConf, SparkContext}


/**
 * Demo of the RDD `map` transformation: doubles the elements 1..10
 * and prints the collected result to stdout.
 */
object Spark02_Oper1 {

  def main(args: Array[String]): Unit = {
    // Configure the Spark runtime: local mode using all available cores.
    val config: SparkConf = new SparkConf().setMaster("local[*]").setAppName("wordCount")
    // Create the Spark context (driver-side entry point).
    val sc = new SparkContext(config)
    try {
      // Build an RDD from the range 1..10 (inclusive).
      val listRDD = sc.makeRDD(1 to 10)
      // `map` applies the function to every element; x => x * 2 is
      // equivalent to the placeholder form `_ * 2`.
      val mapRDD = listRDD.map(x => x * 2)
      // `collect` pulls all results back to the driver; print each one.
      mapRDD.collect().foreach(println)
    } finally {
      // Always stop the context so executor resources are released
      // even if the job above throws. (Was missing in the original.)
      sc.stop()
    }
  }
}
