package cn.tiakon.dmp.untils

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.SparkContext

import scala.util.control.NonFatal


/**
  * 工具类
  *
  * @author Tiakon
  *         2018/3/28 19:24
  */
/**
  * Utility helpers: configuration loading, safe string parsing and
  * HDFS file deletion.
  *
  * @author Tiakon
  *         2018/3/28 19:24
  */
object Utils {

  /** Lazily loaded Typesafe configuration (reads application.conf / reference.conf). */
  lazy val load: Config = ConfigFactory.load()

  /**
    * Parses a string as an `Int`.
    *
    * @param str the string to parse (may be null or non-numeric)
    * @return the parsed value, or 0 when parsing fails
    */
  def str2Int(str: String): Int =
    try str.toInt
    catch {
      // Swallow only non-fatal failures (e.g. NumberFormatException);
      // let OutOfMemoryError, InterruptedException, etc. propagate.
      case NonFatal(_) => 0
    }

  /**
    * Parses a string as a `Double`.
    *
    * @param str the string to parse (may be null or non-numeric)
    * @return the parsed value, or 0.0 when parsing fails
    */
  def str2Double(str: String): Double =
    try str.toDouble
    catch {
      // Swallow only non-fatal failures; fatal errors propagate.
      case NonFatal(_) => 0.0
    }

  /**
    * Deletes a file (or directory, recursively) from the filesystem that the
    * local core-site.xml / Spark Hadoop configuration points at.
    *
    * @param sc       SparkContext whose Hadoop configuration identifies the filesystem
    * @param filePath path of the file or directory to delete
    * @return true if the path existed and was deleted; false if it did not
    *         exist or the delete failed
    */
  def deleteFileByCoreSite(sc: SparkContext, filePath: String): Boolean = {
    val fs: FileSystem = FileSystem.get(sc.hadoopConfiguration)
    val path = new Path(filePath)
    // Fix: propagate the actual delete outcome. The original discarded
    // fs.delete's result and unconditionally returned false.
    // Note: the FileSystem instance is cached by Hadoop — do not close it here.
    fs.exists(path) && fs.delete(path, true)
  }

}
