package com.atbeijing.bigdata.spark.core.rdd.operator.transform

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of key-value transformation operators: leftOuterJoin, rightOuterJoin
 * and cogroup. Runs locally and prints the cogroup result to stdout.
 */
object Spark22_Oper_Transform {

    def main(args: Array[String]): Unit = {
        val sparkConf = new SparkConf()
            .setMaster("local[*]")
            .setAppName("TransformOperator")
        val sc = new SparkContext(sparkConf)

        // TODO operator - transform - KV - leftOuterJoin
        // Left side: note the duplicate key "a".
        val rdd1 = sc.makeRDD(List(("a", 1), ("b", 2), ("c", 3), ("a", 3)))
        // Right side: "d" has no match on the left; "a" is duplicated again.
        val rdd2 = sc.makeRDD(List(("a", 5), ("d", 6), ("a", 4)))

        // leftOuterJoin keeps every left key; unmatched right values become None.
        // Duplicate keys produce the cartesian product of their values:
        // rdd1.leftOuterJoin(rdd2).collect().foreach(println)
        //   (a,(1,Some(5)))
        //   (a,(1,Some(4)))
        //   (a,(3,Some(5)))
        //   (a,(3,Some(4)))
        //   (b,(2,None))
        //   (c,(3,None))

        // rightOuterJoin keeps every right key; unmatched left values become None.
        // rdd1.rightOuterJoin(rdd2).collect().foreach(println)

        // cogroup = co-group ("group + connect"): for each key, collect the
        // values from both RDDs into a pair of iterables (one per side).
        rdd1.cogroup(rdd2).collect().foreach(println)

        sc.stop()
    }

}
