package com.owen

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates basic RDD set and pair operations: union, intersection,
  * groupByKey, reduceByKey, join, leftOuterJoin, rightOuterJoin, cogroup,
  * and cartesian, printing each result to stdout.
  *
  * @author Owen.Que
  * @since 10/4/2018
  */
object RddDemos {

  /**
    * Entry point. Defined as an explicit `main` rather than `extends App`:
    * the Spark docs warn that subclasses of `scala.App` may not work
    * correctly (DelayedInit turns top-level statements into deferred field
    * initialization, which breaks closure serialization on a cluster).
    */
  def main(args: Array[String]): Unit = {
    // Local single-threaded master is enough for this console demo.
    val conf = new SparkConf().setAppName("SparkWordCount").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val rdd1 = sc.parallelize(3 to 7)
      val rdd2: RDD[Int] = sc.parallelize(1 to 10)

      // union keeps duplicates: 3..7 appear in both inputs.
      val ints: Array[Int] = rdd1.union(rdd2).collect()
      println(ints.toList)

      // intersection deduplicates and keeps only the common elements (3..7).
      val ints2: Array[Int] = rdd1.intersection(rdd2).collect()
      println(ints2.toList)

      // groupByKey gathers all values per key into an Iterable.
      val tuples1: Array[(Int, Iterable[Int])] = rdd1.union(rdd2).map((_, 1)).groupByKey().collect()
      println(tuples1.toList)

      // reduceByKey folds values per key map-side first (preferred over groupByKey for aggregation).
      val tuples2: Array[(Int, Int)] = rdd1.union(rdd2).map((_, 1)).reduceByKey(_ + _).collect()
      println(tuples2.toList)

      val rdd3: RDD[(Int, Int)] = sc.parallelize(Array((1, 2), (1, 3), (2, 8)))
      val rdd4: RDD[(Int, Int)] = sc.parallelize(Array((1, 5), (1, 6), (3, 9)))

      // Inner join: only keys present in both RDDs (key 1 here).
      val tuples3: Array[(Int, (Int, Int))] = rdd3.join(rdd4).collect()
      println(tuples3.toList)

      // Left outer join: all keys of rdd3; missing right values become None.
      val tuples4: Array[(Int, (Int, Option[Int]))] = rdd3.leftOuterJoin(rdd4).collect()
      println(tuples4.toList)

      // Right outer join: all keys of rdd4; missing left values become None.
      val tuples5: Array[(Int, (Option[Int], Int))] = rdd3.rightOuterJoin(rdd4).collect()
      println(tuples5.toList)

      // cogroup: every key from either side, paired with both value groups.
      val tuples6: Array[(Int, (Iterable[Int], Iterable[Int]))] = rdd3.cogroup(rdd4).collect()
      println(tuples6.toList)

      // Cartesian product: all pairs of elements (3 x 3 = 9 tuples).
      val tuples7: Array[((Int, Int), (Int, Int))] = rdd3.cartesian(rdd4).collect()
      println(tuples7.toList)
    } finally {
      // Always release the SparkContext; the original leaked it.
      sc.stop()
    }
  }
}
