package cn.tecnova.utils

import java.util.Properties

import cn.tecnova.cd.Base
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}

/**
  * Description: loads the application configuration and exposes MySQL,
  * Kafka, Redis and Elasticsearch connection settings as ready-to-use values.
  **/
object ConfigHandler {

  // Lazily loaded Typesafe config (reads application.conf / reference.conf
  // from the classpath on first access).
  private lazy val config: Config = ConfigFactory.load()

  // MySQL connection parameters
  val url: String = config.getString("db.default.url")
  val driver: String = config.getString("db.default.driver")
  val user: String = config.getString("db.default.user")
  val passwd: String = config.getString("db.default.password")

  // Kafka broker list (host:port[,host:port,...])
  val bootstrapServers: String = config.getString("bootstrap.servers")

  // JDBC properties for the default MySQL connection, built from config.
  val props = new Properties
  props.setProperty("driver", driver)
  props.setProperty("user", user)
  props.setProperty("password", passwd)

  // Alternate JDBC properties.
  // NOTE(review): user/password are hard-coded to "root"/"root" instead of
  // coming from configuration — confirm this is intentional; if not, these
  // should be read from config like `props` above. Left unchanged so existing
  // callers keep the same behavior.
  val props2 = new Properties
  props2.setProperty("driver", driver)
  props2.setProperty("user", "root")
  props2.setProperty("password", "root")

  // Kafka producer configuration
  val kafkaProps = new Properties()
  // Broker addresses
  kafkaProps.setProperty("bootstrap.servers", bootstrapServers)
  // Key/value serializers. Both use classOf[...].getName so a rename or
  // relocation of StringSerializer fails at compile time (the original
  // hard-coded the value serializer's class name as a string literal,
  // which the compiler could not check).
  kafkaProps.setProperty("key.serializer", classOf[StringSerializer].getName)
  kafkaProps.setProperty("value.serializer", classOf[StringSerializer].getName)
  // Require acknowledgement from the full in-sync replica set per record.
  kafkaProps.setProperty("acks", "all")

  /**
    * Kafka consumer parameters for the Spark Streaming integration.
    *
    * @param groupid consumer group id for this stream
    * @return parameter map suitable for KafkaUtils / ConsumerStrategies
    */
  def kafkaParams(groupid: String): Map[String, Object] = Map[String, Object](
    "bootstrap.servers" -> bootstrapServers,
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    "group.id" -> groupid,
//    "request.timeout.ms" -> "31000",
    // Start from the earliest available offset when no committed offset exists.
    "auto.offset.reset" -> "earliest",
//    "auto.offset.reset" -> "latest",
    // Offsets are managed by the application; do not auto-commit.
    "enable.auto.commit" -> (false: java.lang.Boolean)
  )

  // ---- Kafka topics ----

  // NLP data source (consumed topic)
  val topic: String = config.getString("sourceTopic")

  // NLP result data
  val NLPtopic: String = config.getString("NLPtopic")
  // Base transaction flow data
  val baseFlowtopic: String = config.getString("BaseFlowtopic")
  // Business-analysis flow data
  val batopic: String = config.getString("Batopic")

  // User-related article store data source
  val baUserRelationArticleTopic: String = config.getString("BaUserRelationArticleTopic")

  // User data-volume analysis store
  val baVolumeAnalysisTopic: String = config.getString("BaVolumeAnalysisTopic")

  // ---- Redis ----
  val redisHost: String = config.getString("redis.host")
  val redisPort: Int = config.getInt("redis.port")

  val DRAtopic: String = config.getString("DRAtopic")
  val ERAtopic: String = config.getString("ERAtopic")

  // ---- Elasticsearch ----
  val clusterName: String = config.getString("cluster.name")
  val esHost: String = config.getString("es.host")
  val esHost1: String = config.getString("es.host1")
  val esHost2: String = config.getString("es.host2")
  val esHost3: String = config.getString("es.host3")
  // HTTP port kept as a String (used in URL building); TCP port is numeric.
  val esPort: String = config.getString("es.port")
  val esTCPPort: Int = config.getInt("es.TCPport")
  val esIndex: String = config.getString("es.index")
  val esType: String = config.getString("es.type")
  val esNodes: String = config.getString("es.nodes")


}
