import org.apache.spark
import org.apache.spark.sql.SparkSession
/** Demonstrates basic Spark RDD operations on a local master:
  * `map`, `sortBy`, `collect` with a `PartialFunction`, and the
  * difference between `map` and `flatMap` on string splitting.
  */
object TestDemo1 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("TestDemo1")
      .getOrCreate()
    val sc = spark.sparkContext
    try {
      // Square every element of the RDD.
      val distData = sc.parallelize(List(1, 3, 45, 3, 76))
      val sqDist = distData.map(x => x * x)

      // Sort pairs by the second tuple element, descending, into 1 partition.
      val data = sc.parallelize(List((1, 3), (45, 3), (7, 6)))
      val sortData = data.sortBy(_._2, ascending = false, numPartitions = 1)

      println(sqDist.collect().mkString("\t"))
      println(sortData.collect().mkString("\t"))

      // RDD.collect(pf) maps elements through the partial function; because
      // of the catch-all case, every element is kept (nothing is filtered).
      val two: PartialFunction[Int, String] = {
        case 45 => "fourfive"
        case 3  => "three"
        case _  => "other"
      }
      val data1 = sc.parallelize(List(1, 8, 3, 45, 3, 88))
      println(data1.collect(two).collect().mkString("\t"))

      // map yields one Array[String] per line (prints array toString);
      // flatMap flattens the splits into a single RDD of words.
      val test = sc.parallelize(List("How are you", "I am fine", "What about you"))
      println(test.collect().mkString(","))
      println(test.map(x => x.split(" ")).collect().mkString(","))
      println(test.flatMap(x => x.split(" ")).collect().mkString(","))
    } finally {
      // Always release local Spark resources, even if a job fails.
      spark.stop()
    }
  }
}