package testChapter7


import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SPARK_BRANCH, SparkConf, SparkContext}

import java.sql.DriverManager

object Demo {
  /**
   * Word-count demo: splits lines into words, counts occurrences, sorts by
   * count, and prints each (word, count) pair. The same pipeline is written
   * twice — once with explicit lambdas, once with placeholder syntax.
   *
   * @param args args(0) ("true"/"false") selects local mode
   *             (configured via Edit Configurations in the IDE).
   */
  def main(args: Array[String]): Unit = {
    // Default to non-local when no argument is supplied instead of crashing
    // with ArrayIndexOutOfBoundsException on args(0).
    val isLocal = args.headOption.exists(_.toBoolean)

    val conf = new SparkConf().setAppName("helloSpark")
    if (isLocal) {
      conf.setMaster("local[*]")
    }
    val sc = new SparkContext(conf)

    try {
      val arr = Array("hello java", "hello scala")
      val arrrdd = sc.parallelize(arr)

      // flatMap splits each line into words, map pairs each word with 1,
      // reduceByKey sums the counts per word, sortBy orders by the count.
      // NOTE: the second sortBy argument `true` means ASCENDING order
      // (ascending is also Spark's default) — the original comment claiming
      // "descending" was incorrect.
      arrrdd.flatMap(x => x.split(" ")).
        map(x => (x, 1)).
        reduceByKey((x, y) => x + y).
        sortBy(x => x._2, ascending = true).
        foreach(println(_))

      // Same pipeline written with placeholder syntax; sortBy uses the
      // default ascending order here.
      arrrdd.flatMap(_.split(" ")).
        map((_, 1)).
        reduceByKey(_ + _).
        sortBy(_._2).
        foreach(println(_))
    } finally {
      // Always release cluster resources, even if a job fails.
      sc.stop()
    }
  }
}
