package org.example
import org.apache.spark.sql.SparkSession
object sparkData3 {
  /** Entry point: starts a local SparkSession, builds a small
    * (name, score) RDD containing one duplicate pair, and prints the
    * distinct elements.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    // Explicit appName: SparkConf requires an application name in several
    // deployment modes; local[*] uses every available core.
    val spark = SparkSession
      .builder()
      .appName("sparkData3")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext

    try {
      // (name, score) pairs; ("张三", 80) appears twice on purpose to
      // demonstrate distinct().
      val score = sc.makeRDD(List(("张三", 80), ("李四", 75), ("王五", 88), ("张三", 80)))
      // distinct() drops the duplicate tuple. foreach on an RDD runs on
      // executors, so the print order is not guaranteed.
      score.distinct().foreach(println)
    } finally {
      // Always release the SparkContext (threads, UI port, temp dirs),
      // even if the job above throws.
      spark.stop()
    }
  }
}
