import org.apache.spark.sql.SparkSession
object SparkSortRdd {
  /**
   * Small Spark demo of basic RDD operations: `map`, `sortBy`, and both
   * overloads of `collect` (materialize-to-driver and PartialFunction filter-map).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Local mode, using all available cores.
    val spark = SparkSession.builder
      .appName("LocalModeDemo")
      .master("local[*]")
      .getOrCreate()
    // Obtain the SparkContext backing this session.
    val sc = spark.sparkContext
    try {
      // map(): square every element.
      val distData = sc.parallelize(List(1, 3, 45, 3, 76))
      val sqDist = distData.map(x => x * x)

      // sortBy(): order pairs by their second component, descending, in 1 partition.
      val data = sc.parallelize(List((1, 3), (45, 3), (7, 6)))
      val sortData = data.sortBy(_._2, ascending = false, numPartitions = 1)

      // collect(): materialize sqDist and sortData on the driver and print them.
      println(sqDist.collect().mkString(","))
      println(sortData.collect().mkString(", "))

      // collect(PartialFunction): maps each element the function is defined at.
      val one: PartialFunction[Int, String] = { case 1 => "one"; case _ => "other" }
      val datas = sc.parallelize(List(2, 3, 1))
      // Fix: the transformed RDD was previously computed and discarded; print it.
      println(datas.collect(one).collect().mkString(","))
      println(datas.collect().mkString(","))
    } finally {
      // Always release Spark resources, even if a job above fails.
      spark.stop()
    }
  }
}