package chapter07

import java.net.InetAddress

import org.apache.spark.{SparkConf, SparkContext, TaskContext}

/**
 * author: yuhui
 * descriptions:
 * date: 2024-11-12 1:56 PM
 */
/**
 * Demonstrates Spark closure behavior: a non-serializable helper
 * (`RuleClassNotSer`) is created inside `mapPartitions`, so it is
 * instantiated on the Executor and never shipped from the Driver.
 */
object Closure06 {

  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("ClosureDemo").setMaster("local[*]")
    // Create the SparkContext used to build RDDs.
    val sc: SparkContext = new SparkContext(conf)
    // Read the raw input lines to be enriched.
    val lines = sc.textFile("BookData/input/Closure.txt")
    // Join each record against the dimension data.
    val enriched = lines.mapPartitions { partition =>
      // Instantiated on the Executor side, so serialization is never required;
      // every record of this partition shares the single instance below.
      val rule = new RuleClassNotSer
      partition.map { line =>
        val parts = line.split(",")
        val id = parts(0)
        val code = parts(1)
        val name = rule.rulesMap.getOrElse(code, "未知")
        // ID of the thread currently running this task.
        val threadId = Thread.currentThread().getId
        // Partition number of the task handling this record.
        val partitionId = TaskContext.getPartitionId()
        // Hostname of the machine the task is executing on.
        val host = InetAddress.getLocalHost.getHostName
        (id, code, name, threadId, partitionId, host, rule.toString)
      }
    }
    // NOTE: println runs on the executors; with local[*] the output
    // still appears in this process's console.
    enriched.foreach(println)
    sc.stop()
  }

}