package com.cmnit

import com.cmnit.service.{HdfsService, HiveService, YarnService}
import com.cmnit.utils.{ConfigurationManager, ServiceUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.UserGroupInformation
import org.apache.log4j.Logger

object App {
  val logger: Logger = Logger.getLogger(App.getClass)

  /**
   * Application entry point.
   *
   * Positional arguments (all optional):
   *   args(0) — environment type ("test"/"product"); defaults to "product"
   *   args(1) — name of the flow to run; defaults to "all" (run every flow)
   *   args(2..5) — year, month, day, hour of the accounting period; when
   *     absent, ServiceUtils.getAcctDate("", "", "", "") supplies the default
   *     (per the original comment: the hour before now — confirm in ServiceUtils)
   *
   * Exits with status 1 when an unknown flow name is given.
   */
  def main(args: Array[String]): Unit = {
    // Environment type (test/product); defaults to "product".
    val appType = args.lift(0).getOrElse("product")
    ConfigurationManager.setResources(appType)

    // Flow to run; defaults to "all".
    val appName = args.lift(1).getOrElse("all")

    // Accounting period: taken from args(2..5) only when all four parts are
    // present; otherwise ServiceUtils computes the default period.
    val time =
      if (args.length >= 6)
        ServiceUtils.getAcctDate(args(2), args(3), args(4), args(5))
      else
        ServiceUtils.getAcctDate("", "", "", "")

    // Kerberos / ZooKeeper SASL configuration.
    System.setProperty("java.security.krb5.conf", ConfigurationManager.getProperty("java.security.krb5.conf"))
    System.setProperty("zookeeper.sasl.clientconfig", "Client")
    System.setProperty("zookeeper.sasl.client", "true")

    // TODO local mode needs a username/password login instead of a keytab.
    UserGroupInformation.loginUserFromKeytab(
      ConfigurationManager.getProperty("kerberos.principal"),
      ConfigurationManager.getProperty("keytab.file"))
    val conf = new Configuration
    conf.set("hadoop.security.authentication", "Kerberos")
    UserGroupInformation.setConfiguration(conf)
    logger.info("login user: " + UserGroupInformation.getLoginUser)

    // No flow name supplied (or "all") => run every flow in order.
    if ("all".equals(appName)) {
      println("执行全部流程")
      HdfsService.getHdfsResources()
      HdfsService.getHdfsSize(time)
      HiveService.getTableCount(time)
      YarnService.getYarnResources()
      YarnService.getApplicationByXml()
    } else {
      println("appName:" + appName)
      appName match {
        case "yarn-application" => YarnService.getApplicationByXml()
        case "yarn-resource" => YarnService.getYarnResources()
        case "hdfs-resource" => HdfsService.getHdfsResources()
        case "hdfs-size" => HdfsService.getHdfsSize(time)
        case "hive-count" => HiveService.getTableCount(time)
        case other =>
          // Fail loudly instead of exiting silently on a typo'd flow name.
          logger.error("unknown appName: " + other)
          System.exit(1)
      }
    }
  }
}
