package chapter07

import java.net.InetAddress

import org.apache.spark.{SparkConf, SparkContext, TaskContext}

/**
 * author: yuhui
 * descriptions:
 * date: 2024 - 11 - 12 1:42 下午
 */
object Closure03 {

  /**
   * Demonstrates Spark closure serialization.
   *
   * `rulesClass` is instantiated on the Driver and captured by the `map`
   * lambda, so it is serialized and shipped to every executor; the class it
   * references (`RuleClassSer`) must therefore be `Serializable`.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("ClosureDemo").setMaster("local[*]")
    // Create the SparkContext; it is the entry point for building RDDs.
    val sc: SparkContext = new SparkContext(conf)

    try {
      val lines = sc.textFile("BookData/input/Closure.txt")
      // Reference type defined outside the lambda: RuleClassSer is created on
      // the Driver and captured by the closure below (hence serialized).
      val rulesClass = new RuleClassSer

      // Transform each record, enriching it with the rules dimension plus
      // runtime info showing where/when the Task actually executes.
      val res = lines.map { line =>
        val fields = line.split(",")
        val id = fields(0)
        val code = fields(1)
        // Closure: the lookup runs on an executor against the driver-built map.
        val name = rulesClass.rulesMap.getOrElse(code, "未知")
        // ID of the executor thread running this Task.
        val threadId = Thread.currentThread().getId
        // Partition number the current Task is processing.
        val partitionId = TaskContext.getPartitionId()
        // Hostname of the machine the Task is running on.
        val host = InetAddress.getLocalHost.getHostName
        (id, code, name, threadId, partitionId, host, rulesClass.toString)
      }

      res.foreach(println)
    } finally {
      // Always release cluster resources, even if the job throws.
      sc.stop()
    }
  }

}