package com.linys.scala.qf.day06_spark

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo of miscellaneous pair-RDD operators:
  * filterByRange, flatMapValues, foldByKey, keyBy, keys/values, collectAsMap.
  * Each section builds a small RDD, applies one operator and prints the
  * collected result to stdout.
  */
object ExeciseAggregate_06 {

  def main(args: Array[String]): Unit = {

    // Local mode on all available cores; these small demos need no cluster.
    val conf = new SparkConf().setAppName("ExeciseAggregate06").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Visual separator between demo sections.
    val sep = "-" * 91

    println(sep)
    // filterByRange: keep pairs whose KEY lies in the inclusive range ["c", "d"].
    val rdd1 = sc.parallelize(List(("e", 5), ("c", 3), ("d", 4), ("c", 2), ("a", 1)))
    val rdd2 = rdd1.filterByRange("c", "d")
    println(rdd2.collect().toBuffer)

    println(sep)
    // flatMapValues: split each value on spaces and emit one (key, token) pair per token.
    val rdd3 = sc.parallelize(List(("a", "1 2"), ("b", "3 4 5 6")))
    println(rdd3.flatMapValues(_.split(" ")).collect().toBuffer)

    println(sep)
    // foldByKey: fold values per key starting from the zero value "".
    // Note the "|" separator is also applied against the zero value,
    // so results look like (3, "|dog|cat").
    val rdd4 = sc.parallelize(List("dog", "wolf", "cat", "bear"), 2)
    val rdd5 = rdd4.map(w => (w.length, w))
    val rdd6 = rdd5.foldByKey("")(_ + "|" + _)
    println(rdd6.collect().toBuffer)

    println(sep)
    // Word count via foldByKey(0)(_ + _) — equivalent to reduceByKey(_ + _).
    val rdd7 = sc.parallelize(List(
        "JAVA C# SCALA SCALA JAVA JAVA",
        "C++ SCALA JAVA JAVA",
        "C++ SCALA JAVA PYTHON",
        "SCALA"))
      .flatMap(_.split(" "))
      .map((_, 1))
    println(rdd7.foldByKey(0)(_ + _).collect().toBuffer)

    println(sep)
    // keyBy: derive the key from each element (here its length); the element becomes the value.
    val rdd8 = sc.parallelize(List("dog", "salmon", "salmon", "rat", "elephant"), 3)
    val rdd9 = rdd8.keyBy(_.length)
    println(rdd9.collect().toBuffer)

    println(sep)
    // keys / values: project out just the keys or just the values of a pair RDD.
    val rdd10 = sc.parallelize(List("dog", "tiger", "lion", "cat", "panther", "eagle"), 2)
    val rdd11 = rdd10.map(w => (w.length, w))
    println(rdd11.keys.collect().toBuffer)
    println(rdd11.values.collect().toBuffer)

    println(sep)
    // collectAsMap: bring a pair RDD back to the driver as a Map
    // (for duplicate keys, a later value overwrites an earlier one).
    val rdd12 = sc.parallelize(List(("a", 1), ("b", 2)))
    println(rdd12.collectAsMap.toBuffer)

    sc.stop()

  }

}
