package chapter07

import java.net.InetAddress

import org.apache.spark.{SparkConf, SparkContext, TaskContext}

/**
 * author: yuhui
 * descriptions:
 * date: 2024-11-12 1:27 PM
 */
object Closure01 {

  /**
   * Demo of Spark closure serialization: a non-serializable object created on
   * the Driver is captured by a map function, which is expected to fail with a
   * Task-not-serializable error when the job runs.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("ClosureDemo").setMaster("local[*]")
    // Create the SparkContext; it is used to build RDDs.
    val sc: SparkContext = new SparkContext(conf)

    try {
      // Read the input data from the local filesystem.
      val lines = sc.textFile("BookData/input/Closure.txt")

      // NOTE(review): RuleClassNotSer does not implement Serializable and is
      // instantiated on the Driver, so capturing it in the closure below is
      // expected to raise a serialization ERROR — that is the point of this demo.
      // val rulesObj = RuleObjectNotSer
      val rulesObj = new RuleClassNotSer

      // The function is defined on the Driver but executed on Executors.
      val func = (line: String) => {
        val fields = line.split(",")
        val id = fields(0)
        val code = fields(1)
        // Dimension lookup; `rulesObj` is captured by the closure here.
        val name = rulesObj.rulesMap.getOrElse(code, "未知")
        // ID of the thread currently running this Task.
        val threadId = Thread.currentThread().getId
        // Partition number assigned to the current Task.
        val partitionId = TaskContext.getPartitionId()
        // Hostname of the machine the Task is running on.
        val host = InetAddress.getLocalHost.getHostName
        (id, code, name, threadId, partitionId, host, rulesObj.toString)
      }

      // Transform the data, joining in the dimension information.
      val res = lines.map(func)
      res.foreach(println)
    } finally {
      // Always release the SparkContext and its resources, even on failure.
      sc.stop()
    }

  }

}