package morefun.demo

import org.apache.spark.{SparkConf, SparkContext}

import scala.Tuple2

/**
  * Demos of basic Spark RDD operations driven by a local text file and
  * in-memory collections. Each method is a self-contained example; pick the
  * one to run by uncommenting its call in [[main]].
  */
object LocalFile {

  // Input file shared by the file-based demos.
  // NOTE(review): hard-coded developer path — adjust before running elsewhere.
  private val InputPath = "file:/Users/wdy/tmp/spark.txt"

  // 1. Count the total number of characters in the file.
  def charCount(sc: SparkContext): Unit = {
    val lines = sc.textFile(InputPath)
    // fold(0) instead of reduce so an empty file yields 0 rather than throwing.
    val totalNum = lines.map(_.length).fold(0)(_ + _)
    println("chars num = " + totalNum)
  }

  // 2. Count how many times each distinct line occurs, most frequent first.
  def lineCount(sc: SparkContext): Unit = {
    sc.textFile(InputPath)
      .map(line => (line, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .foreach(println)
  }

  // 3. Double every number in the collection.
  def multiNumbers(sc: SparkContext): Unit = {
    val numbers = Array(1, 2, 3, 4, 5, 6, 7, 8)
    sc.parallelize(numbers).map(_ * 2).foreach(println)
  }

  // 4. Keep only the even numbers.
  def filterNumbers(sc: SparkContext): Unit = {
    val numbers = Array(1, 2, 3, 4, 5, 6, 7, 8)
    sc.parallelize(numbers).filter(_ % 2 == 0).foreach(println)
  }

  // 5. Split each line into words.
  def flatWords(sc: SparkContext): Unit = {
    sc.textFile(InputPath).flatMap(_.split(" ")).foreach(println)
  }

  // 6. Group scores by class, sorted by class name.
  def classScore(sc: SparkContext): Unit = {
    val scoresList = Array(("class1", 90), ("class2", 89), ("class3", 44),
      ("class2", 39), ("class1", 9), ("class4", 79))
    val classScores = sc.parallelize(scoresList).groupByKey().sortByKey()

    classScores.foreach { case (clazz, scores) =>
      print(clazz + ": ")
      scores.foreach(s => print(s + " "))
      println("------------------")
    }
  }

  // 7. Total score per class, sorted by class name.
  def classScoreSum(sc: SparkContext): Unit = {
    val scoresList = Array(("class1", 90), ("class2", 89), ("class3", 44),
      ("class2", 39), ("class1", 9), ("class4", 79))
    val classTotals = sc.parallelize(scoresList).reduceByKey(_ + _).sortByKey()

    classTotals.foreach { case (clazz, total) =>
      print(clazz + ": " + total)
      println("-----------------")
    }
  }

  // 8. Sort students by score, highest first.
  def sortScore(sc: SparkContext): Unit = {
    val scoresList = Array((90, "name1"), (91, "name2"), (44, "name3"),
      (46, "name4"), (80, "name5"), (90, "name6"))
    val sortedScore = sc.parallelize(scoresList).sortByKey(ascending = false)

    sortedScore.foreach { score =>
      print(score)
      println("------------------")
    }
  }

  // 9. Join student ids with names and scores, then print one record per student.
  // FIX: the original joined scoreRDD.join(studentsRDD), which produces
  // (id, (score, name)) — but printed _2._1 as "Name" and _2._2 as "Score",
  // swapping the two fields. Joining students first yields (id, (name, score))
  // so the labels match the values.
  def studentInfo(sc: SparkContext): Unit = {
    val studentsList = Array((1, "name1"), (2, "name2"), (3, "name3"),
      (4, "name4"), (5, "name5"), (6, "name6"))
    val scoresList = Array((1, 39), (2, 88), (3, 24), (4, 99), (5, 89), (6, 67))
    val studentsRDD = sc.parallelize(studentsList)
    val scoreRDD = sc.parallelize(scoresList)

    val studentInfo = studentsRDD.join(scoreRDD).sortByKey()

    studentInfo.foreach { case (id, (name, score)) =>
      print("No." + id)
      print(" Name:" + name)
      print(" Score:" + score)
      println("------------------")
    }
  }

  // 10. Each student may have several scores: cogroup ids with the name
  // collection and all score values. Note the "Name"/"Score" fields are
  // Iterables here, so their collection toString is printed.
  def studentInfo2(sc: SparkContext): Unit = {
    val studentsList = Array((1, "name1"), (2, "name2"), (3, "name3"),
      (4, "name4"), (5, "name5"), (6, "name6"))
    val scoresList = Array((1, 39), (2, 88), (3, 24), (4, 99), (5, 89),
      (6, 67), (1, 45), (2, 65), (5, 67))
    val studentsRDD = sc.parallelize(studentsList)
    val scoreRDD = sc.parallelize(scoresList)

    // cogroup yields (id, (Iterable[name], Iterable[score])).
    val studentInfo = studentsRDD.cogroup(scoreRDD).sortByKey()

    studentInfo.foreach { case info @ (id, (names, scores)) =>
      print("Info." + info)
      print("No." + id)
      print(" Name:" + names)
      print(" Score:" + scores)
      println("------------------")
    }
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("LocalFile").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Uncomment the demo to run:
      // charCount(sc)
      // lineCount(sc)
      // multiNumbers(sc)
      // filterNumbers(sc)
      // flatWords(sc)
      // classScore(sc)
      // classScoreSum(sc)
      // sortScore(sc)
      // studentInfo(sc)
      studentInfo2(sc)
    } finally {
      sc.stop() // release the SparkContext even if a demo throws
    }
  }
}
