package com.avcdata.spark.job

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkContext, SparkConf}

/**
 * Shared bootstrap helpers for Spark jobs: loads application config,
 * builds a [[SparkConf]], and exposes factory methods for the
 * streaming and SQL contexts.
 *
 * NOTE(review): `sparkContext` is created eagerly when this object is
 * first touched, so merely referencing `Helper.config` will also start
 * a SparkContext — confirm callers expect that.
 */
object Helper {

  /** Application configuration loaded from the default Typesafe Config
   *  sources (application.conf / system properties / reference.conf). */
  val config: Config = ConfigFactory.load()

  // Spark configuration: values already set externally (e.g. via
  // spark-submit) win; config-file values are only fallbacks.
  private val sparkConf = new SparkConf()
    .setIfMissing("spark.master", config.getString("spark.master"))
    .setIfMissing("spark.app.name", config.getString("spark.app.name"))

  /** Singleton SparkContext for this JVM, built from `sparkConf`. */
  val sparkContext: SparkContext = new SparkContext(sparkConf)

  /**
   * Safely reads a positional command-line argument.
   *
   * @param args         the raw program arguments
   * @param index        position of the desired argument
   * @param defaultValue value returned when `index` is out of bounds
   * @return `args(index)` when present, otherwise `defaultValue`
   */
  def parseOptions(args: Array[String], index: Int, defaultValue: String): String =
    // Bug fix: the original checked `args.length > 0`, which threw
    // ArrayIndexOutOfBoundsException for any nonempty `args` with
    // `index >= args.length`. `lift` returns None for out-of-range indices.
    args.lift(index).getOrElse(defaultValue)

  /**
   * Creates a new StreamingContext over the shared Spark configuration.
   *
   * @param seconds batch interval in seconds
   */
  def getStreamingContext(seconds: Int): StreamingContext =
    new StreamingContext(sparkConf, Seconds(seconds))

  /**
   * Creates a new SQLContext over the given SparkContext.
   *
   * NOTE(review): SQLContext is superseded by SparkSession in Spark 2.x;
   * kept for interface compatibility with existing callers.
   */
  def getSqlContext(sc: SparkContext): SQLContext =
    new SQLContext(sc)
}
