package com.simon.spark.transformations

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Small demo of common Spark RDD transformations on key/value pairs:
 * union, reduceByKey aggregation, and sorting by value.
 *
 * Runs locally; prints results to stdout via foreach(println).
 */
object TransformationsTest {
  def main(args: Array[String]): Unit = {

    // Local master with a single worker thread is enough for these tiny demos.
    val conf = new SparkConf().setAppName("transformation").setMaster("local[1]")
    val sc = new SparkContext(conf)

    try {
      /* Earlier experiments, kept for reference:

      // map + sortBy (ascending)
      val rdd1 = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 9, 8))
      val rdd2 = rdd1.map(_ * 2).sortBy(x => x, true)
      rdd2.foreach(println(_))

      val rdd1 = sc.parallelize(List(5, 6, 4, 3))
      val rdd2 = sc.parallelize(List(1, 2, 3, 4))
      // union of the two RDDs
      val rdd3 = rdd1.union(rdd2)
      // intersection of the two RDDs
      val rdd4 = rdd1.intersection(rdd2)

      val rdd1 = sc.parallelize(List(("tom", 1), ("jerry", 3), ("kitty", 2)))
      val rdd2 = sc.parallelize(List(("jerry", 2), ("tom", 1), ("shuke", 2)))
      // inner join on key
      val rdd3 = rdd1.join(rdd2)
      rdd3.foreach(println(_))
      println("========1=========")
      val rdd4 = rdd1 union rdd2
      // group values by key
      rdd4.groupByKey.foreach(println(_))
      println("========2=========")
      rdd4.foreach(println(_))
      val rdd5 = rdd1.cogroup(rdd2)
      println("========3=========")
      rdd5.foreach(println(_))
      */

      val rdd1 = sc.parallelize(List(("tom", 1), ("jerry", 3), ("kitty", 2), ("shuke", 1)))
      val rdd2 = sc.parallelize(List(("jerry", 2), ("tom", 3), ("shuke", 2), ("kitty", 5)))

      // Concatenate both pair RDDs (keeps duplicate keys).
      val rdd3 = rdd1.union(rdd2)
      println("========2=========")
      rdd3.foreach(println(_))

      // Aggregate by key: sum the counts for each name.
      val rdd4 = rdd3.reduceByKey(_ + _)
      rdd4.foreach(println(_))
      println("========4=========")

      // Sort by value descending: swap to (value, key), sortByKey, swap back.
      val rdd5 = rdd4.map(t => (t._2, t._1)).sortByKey(ascending = false).map(t => (t._2, t._1))
      rdd5.foreach(println(_))
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
