package com.zeta.edw.venus

import org.backuity.clist.{Command, arg, opt}

/**
 * Command-line configuration for the Venus EDW Spark job, parsed with the
 * backuity `clist` library.
 *
 * NOTE: the `var` members are required by the clist DSL — `opt`/`arg` register
 * themselves against the enclosing [[Command]] via mutation, so they cannot be
 * `val`s.
 *
 * @param description free-form description shown in the generated usage text
 */
class Config(description: String = "") extends Command(name = "spark-submit --class <main-class> <app jar>", description) {
  // Optional Spark master; when absent the job relies on whatever master
  // spark-submit / the environment provides.
  var sparkMaster: Option[String] = opt[Option[String]](
    description = "Spark master address. e.g. spark://localhost:7077, local[*]"
  )

  // Hive database the job reads from / writes to.
  var hiveDatabase: String = arg[String](name = "hive-db")

  // Optional prefix prepended (with '_') to the derived Hive table name.
  var hiveTablePrefix: Option[String] = opt[Option[String]]()

  var hiveMetastoreUris: String = arg[String](name = "hive-metastore-uris",
    description = "a list of uris to connect to hive meta store, e.g. thrift://host1:9083,thrift://host2:9083"
  )

  var hdfsUrl: String = arg[String](name = "hdfs-url")

// NOTE(review): dead option kept for reference — remove once impala support is
// confirmed abandoned.
//  var impalaHosts: String = arg[String](name = "impala-hosts",
//    description = "comma separated hosts for impala jdbc connection"
//  )

  // JDBC connection settings for the relational source/target.
  var jdbcUrl: String = arg[String](name = "jdbc-url")

  var jdbcUsername: String = arg[String](name = "jdbc-username")

  var jdbcPassword: String = arg[String](name = "jdbc-password")

  // Optional schema qualifying the JDBC table name (schema.table).
  var dbSchema: Option[String] = opt[Option[String]](name = "db-schema")

  // Number of partitions to repartition to after the JDBC read.
  var readRePartition: Int = opt[Int](name = "read-repartition", default = 2)

  var table: String = arg[String](name = "table")

  /** Fully-qualified JDBC table name: `schema.table` when a schema is given, else `table`. */
  lazy val jdbcTable: String = dbSchema.fold(table)(schema => s"$schema.$table")

  /** Hive table name: lower-cased `table`, prefixed with `<prefix>_` when a prefix is given. */
  lazy val hiveTable: String = {
    val base = table.toLowerCase()
    hiveTablePrefix.fold(base)(prefix => s"${prefix}_$base")
  }
}
