package chapter07

import java.net.InetAddress

import org.apache.spark.{SparkConf, SparkContext, TaskContext}

/**
 * author: yuhui
 * descriptions:
 * date: 2024 - 11 - 12 1:53 下午
 */
/**
 * Demo: a non-serializable rule class referenced inside an RDD closure.
 *
 * Because `RuleClassNotSer` is instantiated *inside* the `map` function, it is
 * created on the Executor side (never serialized from the Driver). The tuple
 * emitted per record includes the thread id, partition id, host name and the
 * instance's `toString` so you can observe where and how often it is created.
 */
object Closure05 {

  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("ClosureDemo").setMaster("local[*]")
    // Create the SparkContext; it is the entry point for building RDDs.
    val sc: SparkContext = new SparkContext(conf)
    // Read the input file: one record per line, fields separated by commas.
    val lines = sc.textFile("BookData/input/Closure.txt")

    // Transform each record, joining it with the dimension (rule) data.
    val res = lines.map(e => {
      val fields = e.split(",")
      val id = fields(0)
      val code = fields(1)
      // RuleClassNotSer is initialized on the Executor, inside the task.
      // NOTE: creating a new instance per record is intentionally inefficient
      // (this demo shows the cost); in real code use mapPartitions to build
      // one instance per partition, or a serializable/broadcast rule object.
      val rulesClass = new RuleClassNotSer
      val name = rulesClass.rulesMap.getOrElse(code, "未知")
      // Current thread id executing this task.
      val threadId = Thread.currentThread().getId
      // Partition number of the current task.
      val partitionId = TaskContext.getPartitionId()
      // Hostname of the machine the task is running on.
      val host = InetAddress.getLocalHost.getHostName
      (id, code, name, threadId, partitionId, host, rulesClass.toString)
    })

    res.foreach(println)

    // Release the SparkContext's resources; without this the application
    // leaks the context (and in cluster mode never signals completion).
    sc.stop()
  }

}