import org.apache.spark.sql.SparkSession
object take {
  /**
   * Entry point: builds a local-mode SparkSession, demonstrates the RDD
   * `map` transformation by squaring a small list of integers, prints the
   * result, and always stops the session.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .appName("LocalModeDemo") // fixed garbled name "LocaLHodeDemo"
      .master("local[*]")       // local mode, use all available cores
      .getOrCreate()
    // Obtain the SparkContext for RDD operations
    val sc = spark.sparkContext
    try {
      // map() demo: square every element of the distributed collection.
      val distData = sc.parallelize(List(1, 3, 45, 3, 76))
      val squared = distData.map(x => x * x)
      println(squared.collect().mkString(","))
    } finally {
      // Release cluster resources even if the job above throws.
      spark.stop()
    }
  }
}