package com.ustcinfo.study.scala.r4.shihonghong

import org.apache.spark.{SparkContext, SparkConf}

/**
  * Created by shihonghong on 2018/8/3.
  */
/**
  * Word-count example: reads a text file, counts word occurrences, and
  * prints the 10 most frequent words with their counts.
  */
object Test {

  /**
    * Entry point. Builds a local SparkContext, runs the word count over
    * `src/main/resources/sampleData/programmingGuide`, prints the top 10
    * (count, word) pairs in descending order of count, then stops the context.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {   // explicit `: Unit =` — procedure syntax is deprecated
    val sparkConf = new SparkConf().setMaster("local").setAppName("Test")
    val sc = new SparkContext(sparkConf)
    try {
      // RDD of raw lines from the input text file
      val txtRDD = sc.textFile("src/main/resources/sampleData/programmingGuide")

      txtRDD
        .filter(_.trim.nonEmpty)            // drop blank lines
        .flatMap(_.split("\\s+"))           // tokenize on runs of whitespace (single flatMap, no empty tokens from repeated spaces)
        .filter(_.nonEmpty)                 // guard against leading-whitespace artifacts of split
        .map(word => (word, 1))             // pair each word with an initial count
        .reduceByKey(_ + _)                 // sum counts per word
        .map { case (word, count) => (count, word) } // swap so the count becomes the sort key
        .sortByKey(ascending = false)       // most frequent first; named arg instead of bare `false`
        .take(10)                           // collect top 10 to the driver
        .foreach(println)                   // print each (count, word) pair
    } finally {
      sc.stop()                             // release Spark resources even if the job fails
    }
  }
}
