package com.event.streaming.config
import com.event.streaming.config.setttings._
import com.typesafe.config.{Config, ConfigFactory}
import scala.util.Try
import scala.collection.JavaConversions._
import scala.concurrent.duration._

//Global, mutable application settings; populated once via Settings.initialize(...)
object Settings {
  //whether debug output is written; stays on until initialize() overrides it
  private var __debugEnabled = true
  //HDFS directory that receives debug output
  private var __debugHDFSDir = "hdfs:///user/events/debug"

  //extra spark properties, keyed by their full "spark.*" name (filled by initialize)
  private var sparkConfs = Map.empty[String, String]
  //read-only accessor for the spark property map
  def spark: Map[String, String] = sparkConfs

  //kafka source settings (broker, schema registry, polling and windowing knobs)
  object Source {
    //broker url for the kafka cluster (set by initialize; fixes old "_kafkaBorkerUrl" typo)
    private var _kafkaBrokerUrl: String = _
    //schema registry url; None when the configured value is null or empty
    private var _schemaRegistryUrl: Option[String] = None

    //poll interval, parsed from a config string such as "500 ms"
    private var _pollingInterval: Duration = _
    //streaming interval exposed to callers (same value as the poll interval)
    def streamingInterval: Duration = _pollingInterval
    //max records fetched per poll
    private var _pollingBatchSize: Int = _
    //streaming batch size exposed to callers
    def streamingBatchSize: Int = _pollingBatchSize
    //how long a single poll may block before timing out
    private var _pollingTimeout: Duration = _
    //length of a streaming window
    private var _windowLength: Duration = _
    //sliding interval between window evaluations
    private var _slidingInterval: Duration = _

    //Producer-side settings: kafka endpoints plus the shared debug flags.
    trait Producer extends ProducerSettings with DebugSettings {
      //broker url
      def kafkaBrokerUrl: String = _kafkaBrokerUrl
      //schema registry url (None when not configured)
      override def schemaRegistryUrl: Option[String] = _schemaRegistryUrl

      //flag to indicate whether or not in debug mode
      override def debugEnabled: Boolean = __debugEnabled
      //the debug dir
      override def debugHDFSDir: String = __debugHDFSDir

    }
    //Consumer-side settings: kafka endpoints, polling knobs and debug flags.
    trait Consumer extends ConsumerSettings with DebugSettings {
      //broker url
      def kafkaBrokerUrl: String = _kafkaBrokerUrl
      //schema registry url (None when not configured)
      override def schemaRegistryUrl: Option[String] = _schemaRegistryUrl

      //max records fetched per poll
      override def pollingBatchSize: Int = _pollingBatchSize
      //poll timeout
      override def pollingTimeout: Duration = _pollingTimeout

      //flag to indicate whether or not in debug mode
      override def debugEnabled: Boolean = __debugEnabled
      //the debug dir
      override def debugHDFSDir: String = __debugHDFSDir
    }
    //Streaming-consumer settings: endpoints, interval/batch and windowing knobs.
    trait StreamingConsumer extends StreamingConsumerSettings with DebugSettings {
      //broker url
      def kafkaBrokerUrl: String = _kafkaBrokerUrl
      //schema registry url (None when not configured)
      override def schemaRegistryUrl: Option[String] = _schemaRegistryUrl

      //streaming interval
      def streamingInterval: Duration = _pollingInterval
      //batch size per streaming interval
      def streamingBatchSize: Int = _pollingBatchSize

      //the length of a window
      def windowLength: Duration = _windowLength
      //the sliding interval between consecutive windows
      def slidingInterval: Duration = _slidingInterval

      //flag to indicate whether or not in debug mode
      override def debugEnabled: Boolean = __debugEnabled
      //the debug dir
      override def debugHDFSDir: String = __debugHDFSDir
    }

    //Loads all source-side settings from the given config; must run before the traits are used.
    def initialize(config: Config): Unit = {
      _kafkaBrokerUrl = config.getString("events.source.kafka.brokerUrl")
      //normalize a null or empty registry url to None (replaces the manual null/length check)
      _schemaRegistryUrl = Option(config.getString("events.source.schema.registryUrl")).filter(_.nonEmpty)

      //durations are parsed from strings such as "500 ms" / "5 seconds"
      _pollingInterval = Duration(config.getString("events.source.streaming.interval"))
      _pollingBatchSize = config.getInt("events.source.streaming.batchSize")
      _pollingTimeout = Duration(config.getString("events.source.streaming.timeout"))
      _windowLength = Duration(config.getString("events.source.streaming.window.length"))
      _slidingInterval = Duration(config.getString("events.source.streaming.sliding.length"))
    }
  }

  //machine-learning model settings (location and refresh cadence)
  object Model {
    //directory the model is loaded from
    private var _modelDir: String = _
    //how often the model is reloaded
    private var _refreshInterval: Duration = _

    //Model settings exposed to consumers, together with the shared debug flags.
    trait Settings extends ModelSettings with DebugSettings {
      //directory the model is loaded from
      def dir: String = _modelDir
      //reload cadence for the model
      override def loadInterval: Duration = _refreshInterval

      //whether debug output is enabled
      override def debugEnabled: Boolean = __debugEnabled
      //directory that receives debug output
      override def debugHDFSDir: String = __debugHDFSDir
    }

    //Reads the model settings from the given config.
    def initialize(config: Config): Unit = {
      _modelDir = config.getString("events.model.dir")
      _refreshInterval = Duration(config.getString("events.model.refresh.interval"))
    }
  }

  //target sink settings (relational database the events are written to)
  object Sink {
    //jdbc driver class name
    private var _jdbcDriver: String = _
    //jdbc connection url
    private var _jdbcUrl: String = _

    //database login user
    private var _jdbcUser: String = _
    //database login password
    private var _jdbcPassword: String = _

    //Database connection settings exposed to the sink, plus the shared debug flags.
    trait Settings extends DatabaseSettings with DebugSettings {
      //jdbc driver class name
      def sinkDbDriver: String = _jdbcDriver
      //jdbc connection url
      def sinkDbUrl: String = _jdbcUrl

      //login user
      def sinkDBUser: String = _jdbcUser
      //login password
      def sinkDBPwd: String = _jdbcPassword

      //whether debug output is enabled
      override def debugEnabled: Boolean = __debugEnabled
      //directory that receives debug output
      override def debugHDFSDir: String = __debugHDFSDir
    }

    //Reads the sink database settings from the given config.
    def initialize(config: Config): Unit = {
      _jdbcDriver = config.getString("events.sink.db.driver")
      _jdbcUrl = config.getString("events.sink.db.url")
      _jdbcUser = config.getString("events.sink.db.user")
      _jdbcPassword = config.getString("events.sink.db.pwd")
    }
  }

  //Loads the configuration (from the named resource when given, otherwise the
  //default Typesafe Config chain) and (re)initializes every nested settings object.
  def initialize(configFile: Option[String]): Unit = {
    //pick the config source
    val config = configFile.fold(ConfigFactory.load())(name => ConfigFactory.load(name))

    //rebuild the spark property map immutably, using the explicit JavaConverters
    //decorators instead of the deprecated implicit JavaConversions + var mutation
    import scala.collection.JavaConverters._
    sparkConfs = config.getConfig("spark").entrySet().asScala
      .map(e => ("spark." + e.getKey) -> e.getValue.unwrapped().toString)
      .toMap

    //source
    this.Source.initialize(config)
    //model
    this.Model.initialize(config)
    //database
    this.Sink.initialize(config)

    //debug flag defaults to true when the key is absent or malformed
    this.__debugEnabled = Try(config.getBoolean("events.debug.enabled")).toOption.getOrElse(true)
    this.__debugHDFSDir = config.getString("events.debug.hdfs.dir")
  }
}
