package com.doit.day02

import com.doit.beans.{Orders, User}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: Learn big data at Duoyi Education (多易教育)
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description: Demo of Spark pair-RDD joins (inner join and fullOuterJoin) over orders/users CSV data
 */
object Demo05Join {

  /**
   * Demonstrates Spark pair-RDD joins: parses orders and users from CSV,
   * keys both RDDs by `uid`, builds an inner `join`, and prints the result
   * of a `fullOuterJoin` (which keeps unmatched rows from either side).
   *
   * Fix over the original: the SparkContext is now stopped in a `finally`
   * block — the original leaked it on exit.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    try {
      // Raw CSV lines for orders and users.
      val rdd1 = sc.textFile("data/orders/order.csv")
      val rdd2 = sc.textFile("data/orders/user.csv")

      // Parse each order line into an Orders bean.
      // NOTE(review): assumes exactly 5 comma-separated fields per line and a
      // numeric second field — a header or malformed row will throw. Confirm
      // input files are clean before hardening.
      val ordersRDD = rdd1.map { line =>
        val arr = line.split(",")
        Orders(arr(0), arr(1).toDouble, arr(2), arr(3), arr(4))
      }

      // Parse each user line into a User bean (uid, name, age).
      val userRDD = rdd2.map { line =>
        val arr = line.split(",")
        User(arr(0), arr(1), arr(2).toInt)
      }

      // Key both sides by uid so they become pair RDDs eligible for joins.
      val ordersTp = ordersRDD.map(bean => (bean.uid, bean))
      val userTp = userRDD.map(bean => (bean.uid, bean))

      // Inner join: only uids present on BOTH sides survive.
      val joined: RDD[(String, (Orders, User))] = ordersTp.join(userTp)

      // Full outer join: unmatched rows appear as (Some(_), None) / (None, Some(_)).
      ordersTp.fullOuterJoin(userTp).foreach(println)

      /* joined.map(tp => {
        tp._2._1.uid
        tp._2._2.name
      }) */

      // ordersTp.zip(ordersTp).foreach(println)
      // ordersTp.union(userTp)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }

}
