package ScalaSpark

import org.apache.spark.{SparkConf, SparkContext}

object ScalaTransformationOperation {

  /** Entry point: uncomment exactly one demo below and run it.
    *
    * Each demo method builds its own local SparkContext, runs one
    * transformation example, and stops the context when done.
    *
    * Note: procedure syntax (`def main(...) { ... }`) is deprecated and
    * removed in Scala 3, so the result type is declared explicitly.
    */
  def main(args: Array[String]): Unit = {
    //map()
    //filter()
    //flatMap()
    //groupByKey()
    //reduceByKey()
    //sortByKey()
    join()
  }

  /** Demonstrates `map`: applies a function to every element of an RDD,
    * producing a new RDD of the same length. Doubles each number and
    * prints the results on the (local) executors.
    */
  def map(): Unit = {
    val conf = new SparkConf().setAppName("map").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val numbers = Array(1, 2, 3, 4, 5)
      val numberRDD = sc.parallelize(numbers, 1)
      val multipleNumberRDD = numberRDD.map(num => num * 2)
      multipleNumberRDD.foreach(num => println(num))
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

  /** Demonstrates `filter`: keeps only elements satisfying a predicate.
    * Selects the even numbers from 1..10 and prints them.
    */
  def filter(): Unit = {
    val conf = new SparkConf().setAppName("filter").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val numbers = Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
      val numberRDD = sc.parallelize(numbers, 1)
      val evenNumberRDD = numberRDD.filter(num => num % 2 == 0)
      evenNumberRDD.foreach(num => println(num))
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

  /** Demonstrates `flatMap`: maps each element to zero or more elements
    * and flattens the result. Splits each line into words and prints them.
    */
  def flatMap(): Unit = {
    val conf = new SparkConf().setAppName("flatMap").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val lineArray = Array("hello you", "hello me", "hello world")
      val lines = sc.parallelize(lineArray, 1)
      val words = lines.flatMap(line => line.split(" "))
      words.foreach(word => println(word))
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

  /** Demonstrates `groupByKey`: gathers all values sharing a key into one
    * Iterable per key. Prints each class name followed by its scores.
    *
    * Uses Scala tuple literals with `Int` instead of the original
    * `new Tuple2[String, Integer]` — boxed java.lang.Integer is
    * non-idiomatic in Scala and only works via Predef's implicit
    * Integer/Int conversions.
    */
  def groupByKey(): Unit = {
    val conf = new SparkConf().setAppName("groupByKey").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val scoreList = Array(
        ("class1", 80),
        ("class2", 88),
        ("class1", 80),
        ("class2", 90))
      val scores = sc.parallelize(scoreList, 1)
      val groupedScores = scores.groupByKey()
      groupedScores.foreach { score =>
        println(score._1)
        score._2.foreach(singleScore => println(singleScore))
        println("===========")
      }
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

  /** Demonstrates `reduceByKey`: combines all values for a key with an
    * associative function (here, summation), producing one entry per key.
    *
    * Uses Scala tuple literals with `Int` instead of the original
    * `new Tuple2[String, Integer]`; with plain `Int`, `_ + _` is direct
    * primitive addition and no Integer boxing/unboxing implicits are needed.
    */
  def reduceByKey(): Unit = {
    val conf = new SparkConf().setAppName("reduceByKey").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val scoreList = Array(
        ("class1", 80),
        ("class2", 88),
        ("class1", 80),
        ("class2", 90))
      val scores = sc.parallelize(scoreList, 1)
      val totalScores = scores.reduceByKey(_ + _)
      totalScores.foreach(classScore => println(classScore._1 + ":" + classScore._2))
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

  /** Demonstrates `sortByKey`: returns an RDD sorted by key (ascending by
    * default). Sorts (score, name) pairs by score and prints them.
    *
    * Uses Scala tuple literals with `Int` keys instead of the original
    * `new Tuple2[Integer, String]`, so the implicit `Ordering[Int]` is
    * used directly.
    */
  def sortByKey(): Unit = {
    val conf = new SparkConf().setAppName("sortByKey").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val scoreList = Array(
        (90, "cat"),
        (80, "leo"),
        (80, "opp"),
        (55, "lll"))
      val scores = sc.parallelize(scoreList, 1)
      val sortedScores = scores.sortByKey()
      sortedScores.foreach(studentScore => println(studentScore._1 + ":" + studentScore._2))
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

  /** Demonstrates `join`: inner-joins two pair RDDs on their keys,
    * yielding (key, (leftValue, rightValue)). Since each student id
    * appears twice in the score list, every student produces two
    * joined records.
    *
    * BUG FIX: the original wrapped the per-record printlns inside an
    * outer `println({...})`, so the Unit value "()" was also printed
    * for every joined record. The outer println is removed.
    */
  def join(): Unit = {
    val conf = new SparkConf().setAppName("join").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Students: (id, name)
      val studentList = Array(
        (1, "leo"),
        (2, "tom"),
        (3, "marry"))
      // Scores: (id, score) — ids repeat, one row per exam
      val scoreList = Array(
        (1, 100), (2, 80),
        (3, 50), (1, 70),
        (2, 10), (3, 40))

      val students = sc.parallelize(studentList)
      val scores = sc.parallelize(scoreList)

      // (id, (name, score)) for every matching id pair
      val studentScores = students.join(scores)
      studentScores.foreach { studentScore =>
        println("student id：" + studentScore._1)
        println("student name：" + studentScore._2._1)
        println("student score：" + studentScore._2._2)
        println("==============")
      }
    } finally {
      // The original leaked the SparkContext; always release it.
      sc.stop()
    }
  }

}
