package com.fwmagic.spark.core.cases.threadsafe

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object TaskThreadSafe2 {
    def main(args: Array[String]): Unit = {
        // args(0): "true" to run with a local master; args(1): input file path.
        val runLocally = args(0).toBoolean

        val sparkConf = new SparkConf().setAppName(this.getClass.getSimpleName)
        if (runLocally) sparkConf.setMaster("local[*]")

        val sparkContext = new SparkContext(sparkConf)

        val inputLines: RDD[String] = sparkContext.textFile(args(1))

        /**
          * The DateUtilsClass instance is created on the Driver side.
          * 1. DateUtilsClass must be serializable so it can be shipped to the Executors.
          * 2. Each Task on an Executor deserializes its own private copy of the
          *    instance, so there is no thread-safety concern.
          */
        val dateParser = new DateUtilsClass

        // Pair every line with the timestamp parsed from it.
        val withTimestamps: RDD[(String, Long)] = inputLines.map { line =>
            (line, dateParser.getTime(line))
        }

        // Bring results back to the Driver and print them.
        withTimestamps.collect().foreach(println)

        sparkContext.stop()
    }
}
