package com.bd03.cmccmonitor.conf

import java.util

import com.typesafe.config.{Config, ConfigFactory, ConfigObject}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import scalikejdbc.config.DBs

/**
 * CMCC-monitor configuration wrapper.
 *
 * Loads application settings (Kafka topic/brokers/group, province-code mapping)
 * from the classpath configuration via Typesafe Config, initializes the
 * scalikejdbc connection pools, and provides factory values for the Spark
 * Streaming context and Kafka consumer parameters.
 */
object ConfigHelper {

  // Initialize scalikejdbc connection pool(s) from the db.* entries of the
  // loaded configuration. Runs once, at first access of this object.
  DBs.setup()

  // Typesafe Config loaded from application.conf / reference.conf on the classpath.
  private lazy val CONFIG: Config = ConfigFactory.load()

  // Kafka topic(s) to subscribe to; wrapped in an Array as expected by the
  // Spark-Kafka direct-stream subscribe API.
  val TOPIC: Array[String] = Array(CONFIG.getString(Constant.TOPIC))
  // Kafka bootstrap servers, e.g. "host1:9092,host2:9092".
  val BROKERS: String = CONFIG.getString(Constant.BROKERS)
  // Kafka consumer group id.
  val GROUPID: String = CONFIG.getString(Constant.GROUPID)

  /**
   * Builds a fresh StreamingContext with a 3-second batch interval.
   *
   * Each call constructs a new SparkConf and StreamingContext; callers are
   * responsible for starting and stopping the returned context.
   *
   * @return a new StreamingContext (batch duration: 3 seconds)
   */
  def getSparkStreamingContext: StreamingContext = {
    val conf = new SparkConf()
      .setAppName("cmcc_monitor")
      // NOTE(review): master is hard-coded for local development; for cluster
      // deployment this should be removed and supplied via spark-submit --master.
      .setMaster("local[*]")
      // Kryo is faster and more compact than default Java serialization.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    new StreamingContext(conf, Seconds(3))
  }

  /**
   * Kafka consumer parameters for the direct stream.
   *
   * Auto-commit is disabled (`enable.auto.commit` = false) so offsets can be
   * committed manually after records are processed; `auto.offset.reset` =
   * "earliest" makes a new consumer group start from the beginning of the topic.
   */
  val KAFKAPARAMS: Map[String, Object] = Map(
    "bootstrap.servers" -> BROKERS,
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    "group.id" -> GROUPID,
    "auto.offset.reset" -> "earliest",
    "enable.auto.commit" -> (false: java.lang.Boolean)
  )

  // Explicit converters (.asScala) replace the deprecated implicit
  // scala.collection.JavaConversions, which silently converted collections
  // and is removed in Scala 2.13+.
  import scala.collection.JavaConverters._

  // Province code -> province name mapping, read as a config object and
  // unwrapped into an immutable Scala Map.
  val PCODE2PNAME: Map[String, AnyRef] =
    CONFIG.getObject(Constant.PCODE2PNAME).unwrapped().asScala.toMap

}
