package com.itcj.dmp.utils

import java.util
import java.util.Map

import com.typesafe.config.{ConfigFactory, ConfigValue}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

/**
  * Enriches a [[SparkSession.Builder]] with the ability to pull Spark
  * settings from an external Typesafe config named `spark`
  * (`spark.conf` / `spark.properties` / `spark.json` on the classpath,
  * merged with system properties and `reference.conf`).
  *
  * @param build the builder that config entries are applied to
  */
class SparkConfigHelper(build: SparkSession.Builder) {

  // Merged config stack for the "spark" base name.
  val config = ConfigFactory.load("spark")

  /**
    * Applies matching config entries to the wrapped builder and returns it,
    * so the call can be chained fluently.
    *
    * Only entries whose origin reports no resource name are applied —
    * `StringUtils.isBlank(null)` is `true`, so values that did not come from
    * a classpath resource file (e.g. system properties) pass the filter.
    * NOTE(review): if the intent was to apply only the values coming FROM the
    * `spark` resource file, the condition should be `isNotBlank` — confirm
    * with the author.
    *
    * @return the same builder instance, with matching entries set on it
    */
  def loadConfig(): SparkSession.Builder = {
    import scala.collection.JavaConverters._

    for (entry <- config.entrySet().asScala) {
      val key      = entry.getKey
      val value    = entry.getValue.unwrapped().toString
      val resource = entry.getValue.origin().resource()
      if (StringUtils.isBlank(resource)) {
        build.config(key, value)
      }
    }
    build
  }
}


object SparkConfigHelper {

  /**
    * Implicit conversion that lets callers invoke `loadConfig()` directly on
    * a [[SparkSession.Builder]]:
    * `SparkSession.builder().loadConfig().getOrCreate()`.
    *
    * @param build the builder to wrap
    * @return a helper exposing `loadConfig()` on the builder
    */
  implicit def spark2helper(build: SparkSession.Builder): SparkConfigHelper =
    new SparkConfigHelper(build)

  /**
    * Smoke test: builds a local session with the external config applied,
    * then shuts it down.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("SparkConfigHelper")
      .loadConfig()
      .getOrCreate()

    // Release local executor/driver resources instead of leaking the session.
    spark.stop()
  }
}