import org.apache.spark.sql.SparkSession

/**
 * Small RDD demo: builds two single-partition integer RDDs and prints
 * the result of `subtract` (elements of data1 not present in data2)
 * and `zip` (pairwise tuples; both RDDs have 5 elements and 1 partition,
 * which satisfies zip's same-size / same-partitioning requirement).
 */
object data1_core2 {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      // appName now reflects what the job actually does
      // (was "Spark Pi", a copy-paste leftover from the Pi example).
      .appName("data1_core2 RDD demo")
      .getOrCreate()

    try {
      val sc = spark.sparkContext

      // Single partition keeps element order deterministic for zip/printing.
      val data1 = sc.parallelize(List(60, 90, 75, 80, 72), 1)
      val data2 = sc.makeRDD(List(72, 80, 75, 66, 95), 1)

      // collect() brings results to the driver before printing; calling
      // foreach(println) directly on an RDD executes on the executors and
      // is only coincidentally visible in local mode.
      data1.subtract(data2).collect().foreach(println)
      data1.zip(data2).collect().foreach(println)
    } finally {
      // Always release the SparkSession/SparkContext, even on failure.
      spark.stop()
    }
  }

}
