package com.migu.utils

/**
  * Created by linyi on 2016/10/12.
  */
import java.io.{BufferedReader, InputStreamReader}

import scala.util.control.NonFatal

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path, _}
import org.apache.spark.SparkConf

object ConfigUtil {

  // Spark configuration snapshot (key -> value), populated once by loadConfig.
  // Kept as a public var for backward compatibility with existing callers.
  var config: Map[String, String] = Map[String, String]()
  // Key/value pairs parsed from the HDFS config file by loadHdfsConfig.
  var configData: Map[String, String] = Map[String, String]()

  /**
    * Copies every entry of the given SparkConf into `config`.
    *
    * Failures are logged and swallowed on purpose: an empty `config` is
    * surfaced later by getConfig's IllegalArgumentException.
    *
    * @param conf the SparkConf whose entries are loaded
    */
  def loadConfig(conf: SparkConf) = {
    try {
      // conf.getAll is an Array[(String, String)]; merge it wholesale instead
      // of appending entry by entry.
      config ++= conf.getAll
    } catch {
      case NonFatal(e) =>
        // Original code did println(e.printStackTrace()), which printed "()"
        // (Unit) after the trace. Print the trace directly.
        e.printStackTrace()
    }
  }

  /**
    * Reads `key=value` lines from a config file on HDFS into `configData`.
    *
    * Reading stops at end of file or at a line equal to the literal sentinel
    * "exit" (kept for backward compatibility with existing config files).
    * The original loop relied solely on the sentinel and threw a
    * NullPointerException at EOF when it was missing; EOF (null line) now
    * terminates the loop too. The reader is always closed.
    *
    * @param confPath HDFS path of the configuration file
    */
  def loadHdfsConfig(confPath: String) = {
    val conf: Configuration = new Configuration()
    val hdfs: FileSystem = FileSystem.get(conf)

    val inputStream: FSDataInputStream = hdfs.open(new Path(confPath))
    val br = new BufferedReader(new InputStreamReader(inputStream))
    try {
      var line = br.readLine()
      while (line != null && line != "exit") {
        val param = line.split("=")
        // Only well-formed "key=value" lines are kept; anything else (blank
        // lines, comments, lines with extra '=') is ignored.
        if (param.length == 2 && param(0) != null && param(1) != null)
          configData += (param(0) -> param(1))
        line = br.readLine()
      }
    } finally {
      // Closes the underlying FSDataInputStream as well.
      br.close()
    }
  }

  /** True when the loaded Spark config contains isDebug=true.
    *
    * @throws IllegalArgumentException if the Spark config was never loaded
    */
  def isDebug(): Boolean =
    getConfig().getOrElse("isDebug", "") == "true"

  /** Returns the Spark config map, or throws if it was never populated.
    *
    * @throws IllegalArgumentException when loadConfig has not been run
    */
  def getConfig(): Map[String, String] =
    if (config != null && config.nonEmpty) config
    else throw new IllegalArgumentException("can not get the spark config file!")

  /** Returns the HDFS config map, or throws if it was never populated.
    *
    * @throws IllegalArgumentException when loadHdfsConfig has not been run
    */
  def getHdfsConfig(): Map[String, String] =
    if (configData != null && configData.nonEmpty) configData
    else throw new IllegalArgumentException("can not get the hdfs config file!")
}