import org.apache.spark.sql.SparkSession
object fitel {
  /** Spark local-mode demo: filters an RDD of (char, count) pairs and
    * prints every pair whose count exceeds 1.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .appName("LocalModeDemo") // fixed garbled name (was "LocaLHodeDemo")
      .master("local[*]")       // local mode, use all available cores
      .getOrCreate()
    // Obtain the SparkContext for RDD operations.
    val sc = spark.sparkContext
    try {
      val rdd1 = sc.parallelize(List(('a', 1), ('b', 2), ('c', 3)))
      // Define the filter once and trigger a single job; the original code
      // built the same filtered RDD three times and ran two extra collect()
      // jobs whose results were discarded.
      val filteredResult = rdd1.filter { case (_, count) => count > 1 }
      filteredResult.collect().foreach(println)
    } finally {
      // Always stop the session so local-mode resources are released,
      // even if the job above throws.
      spark.stop()
    }
  }
}