package com.haozhen.rdd

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/30  18:14
  */
object MapJoinDemo {

  /**
    * Map-side (broadcast) join demo: the small product table is collected to the
    * driver and broadcast to every executor, so joining it with the large order
    * table requires no shuffle.
    *
    * Input files (semicolon-delimited):
    *  - data/lagou_product_info.txt : product info, field 0 = productId
    *  - data/orderinfo.txt          : order info,   field 2 = productId
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.broadcast.Broadcast
    import org.apache.spark.rdd.RDD
    import org.apache.spark.{SparkConf, SparkContext}

    // App name = canonical class name with the trailing '$' (Scala object suffix) dropped.
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getCanonicalName.init)

    val sc = new SparkContext(conf)

    // FIX: key was misspelled "fs.local.block,size" (comma instead of dot), so the
    // intended 128 MB local block size was never applied. Use a Long literal to make
    // the 64-bit arithmetic explicit.
    sc.hadoopConfiguration.setLong("fs.local.block.size", 128L * 1024 * 1024)

    // Small (dimension) table: productId -> full product line.
    val productRdd: RDD[(String, String)] = sc.textFile("data/lagou_product_info.txt").map {
      line =>
        val fields = line.split(";")
        (fields(0), line)
    }

    // Collect the small table to the driver and broadcast it to all executors.
    val productBC = sc.broadcast(productRdd.collectAsMap())

    // Large (fact) table: productId (3rd field) -> full order line.
    val orderRdd: RDD[(String, String)] = sc.textFile("data/orderinfo.txt").map {
      line =>
        val fields = line.split(";")
        (fields(2), line)
    }

    // Map-side join: look each order's productId up in the broadcast map.
    // FIX: use Option instead of a null sentinel for missing products (the result
    // only feeds count(), so the count is unchanged).
    orderRdd.map {
      case (pid, orderInfo) =>
        val productMap = productBC.value
        (pid, (productMap.get(pid), orderInfo))
    }.count()

    // Keep the driver alive so the Spark web UI (http://localhost:4040) can be inspected.
    Thread.sleep(1000000)

    sc.stop()
  }
}
