package com.zyh.day04

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.spark_project.jetty.server.Authentication.User

object BroadcastTest2 {
  /**
   * Demonstrates a map-side (broadcast) join: the small user table is
   * collected to the driver, broadcast once to every executor, and each
   * transaction is joined with its user locally — avoiding the shuffle a
   * regular `join` would cost.
   *
   * @param args optional input-path overrides: args(0) = users file,
   *             args(1) = transactions file (defaults keep the original D:/ paths)
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName("bt")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Allow paths on the command line; fall back to the original hard-coded defaults.
    val usersPath: String = if (args.length > 0) args(0) else "D:/users.txt"
    val transactionsPath: String = if (args.length > 1) args(1) else "D:/transactions.txt"

    // Whitespace-delimited lines: uid name age sex
    val usersRdd: RDD[User] = sc.textFile(usersPath)
      .map(line => line.split("\\s+"))
      .map(array => User(array(0).toInt, array(1), array(2).toInt, array(3)))

    // Whitespace-delimited lines: tid category trade uid
    val transactionsRdd: RDD[Transaction] = sc.textFile(transactionsPath)
      .map(line => line.split("\\s+"))
      .map(array => Transaction(array(0).toInt, array(1), array(2).toDouble, array(3).toInt))

    // Collect the (small) user table to the driver as a Map keyed by uid...
    val usersMap: collection.Map[Int, User] = usersRdd
      .map(user => (user.uid, user))
      .collectAsMap()
    // ...and broadcast it, so it is shipped once per executor instead of once per task.
    val usersMapBroadcast: Broadcast[collection.Map[Int, User]] = sc.broadcast(usersMap)

    val resultRdd: RDD[(User, Transaction)] = transactionsRdd.mapPartitions { transactions =>
      // Read the broadcast value once per partition, not once per record.
      val users: collection.Map[Int, User] = usersMapBroadcast.value
      // Inner-join semantics: a transaction whose uid has no matching user is
      // dropped, instead of failing the task with NoSuchElementException as the
      // previous direct lookup `users(transaction.uid)` did.
      transactions.flatMap { transaction =>
        users.get(transaction.uid).map(user => (user, transaction))
      }
    }

    // NOTE: println executes on the executors; with local[*] it still reaches this console.
    resultRdd.foreach(element => println(element))

    sc.stop()
  }
}
/**
 * Immutable user record parsed from one line of the users input file.
 *
 * @param uid  unique user id (join key against [[Transaction.uid]])
 * @param name user name
 * @param age  user age in years
 * @param sex  user sex, kept as the raw input token
 */
final case class User(uid: Int, name: String, age: Int, sex: String)
/**
 * Immutable transaction record parsed from one line of the transactions input file.
 *
 * @param tid      unique transaction id
 * @param category transaction category label
 * @param trade    trade amount
 * @param uid      id of the user who made the transaction (join key against [[User.uid]])
 */
final case class Transaction(tid: Int, category: String, trade: Double, uid: Int)
