package org.example

import org.apache.spark.sql.SparkSession

/**
 * Small Spark-core demo.
 *
 * Builds a local SparkSession, then:
 *  1. adds 2 to each score, keeps scores >= 80, sorts descending and prints the top one;
 *  2. splits each sentence into words and prints every token.
 */
object data1_core {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Single partition so the sort + take(1) below is deterministic.
    val data1 = sc.parallelize(List(60, 90, 75, 80, 72), 1)
    val data2 = sc.makeRDD(List("how are you", "I am fine", "think you"))

    // Bump every score by 2, keep passing scores, sort descending, take the highest.
    val res1 = data1.map(_ + 2).filter(_ >= 80).sortBy(identity, ascending = false).take(1)

    // BUG FIX: the original used split(""), which tokenizes each sentence into
    // single characters; split(" ") produces the intended whitespace-separated words.
    val res2 = data2.flatMap(_.split(" "))

    // take(1) already returned a driver-side Array, so plain foreach/println is fine.
    res1.foreach(println)
    res2.collect().foreach(println)

    // spark.stop() also stops the underlying SparkContext.
    spark.stop()
  }
}
