package com.cmnit

import com.cmnit.model.{GantryLicencePlate, TollLicencePlate}
import com.cmnit.utils.ConfigurationManager
import org.apache.hadoop.security.UserGroupInformation
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{DataFrame, SparkSession}

object Main {
  val logger: Logger = Logger.getLogger(Main.getClass)

  /**
   * Job entry point.
   *
   * Expected arguments:
   *   args(0) — job name ("GantryLicencePlate" | "TollLicencePlate"); falls back to the
   *             `app.name` config property when empty.
   *   args(1..4) — year, month, day, hour partition values (only read when exactly
   *                5 arguments are supplied; otherwise empty strings are passed through).
   */
  def main(args: Array[String]): Unit = {

    // BUGFIX: guard args(0) — launching with no arguments previously threw
    // ArrayIndexOutOfBoundsException before the config fallback below could apply.
    val requestedName = if (args.nonEmpty) args(0) else ""

    // Partition coordinates; all-empty unless the full 5-argument form was used.
    val (year, month, day, hour) =
      if (args.length == 5) (args(1), args(2), args(3), args(4))
      else ("", "", "", "")

    // Kerberos configuration
    System.setProperty("java.security.krb5.conf", ConfigurationManager.getProperty("java.security.krb5.conf"))
    System.setProperty("zookeeper.sasl.clientconfig", "Client")
    System.setProperty("zookeeper.sasl.client", "true")

    /*// TODO: local mode needs keytab login with user credentials
    UserGroupInformation.loginUserFromKeytab(ConfigurationManager.getProperty("kerberos.principal"), ConfigurationManager.getProperty("keytab.file"))
    logger.info("login user: " + UserGroupInformation.getLoginUser)*/

    // Resolve the effective app name: CLI value wins, config property is the fallback.
    val appName =
      if (requestedName.isEmpty) ConfigurationManager.getProperty("app.name")
      else requestedName
    logger.info("APP名称：" + appName)

    val mode = ConfigurationManager.getProperty("spark.runMode")
    logger.info("运行方式：" + mode)

    val conf = new SparkConf
    conf.set("spark.serializer", ConfigurationManager.getProperty("spark.serializer"))
    conf.set("spark.scheduler.mode", ConfigurationManager.getProperty("spark.scheduler.mode"))
    conf.set("spark.executor.extraJavaOptions", ConfigurationManager.getProperty("spark.executor.extraJavaOptions"))
    conf.set("spark.default.parallelism", ConfigurationManager.getProperty("spark.default.parallelism"))
    conf.set("spark.sql.shuffle.partitions", ConfigurationManager.getProperty("spark.sql.shuffle.partitions"))
    conf.set("sparkSession.debug.maxToStringFields", ConfigurationManager.getProperty("sparkSession.debug.maxToStringFields"))
    // BUGFIX: value was the garbled string "log4j.propertiestrue"; Hive dynamic
    // partitioning expects the literal "true" (paired with "nonstrict" mode below).
    conf.set("hive.exec.dynamic.partition", "true")
    conf.set("hive.exec.dynamic.partition.mode", "nonstrict")
    // Ensure data already received is fully processed before shutdown when the
    // application is killed.
    conf.set("spark.streaming.stopGracefullyOnShutdown", "true")

    // Local submit uses metastore 2.1.1, oozie submit uses 2.1
    // conf.set("spark.sql.hive.metastore.version", "2.1.1")
    // Workaround for: "Detected cartesian product for INNER join between logical plans"
    conf.set("spark.sql.crossJoin.enabled", "true")

    // Build the SparkSession with Hive support.
    val sparkSession: SparkSession = SparkSession.builder.config(conf).master(mode).appName(appName).enableHiveSupport.getOrCreate
    logger.info("创建sparkSession对象")

    // Lane / toll-station intermediate table: toll stations (ORGTYPE='6') joined
    // to their road organization (ORGTYPE='8') at the latest `lastver`.
    val tollFrame = sparkSession.sql(
      "select " +
        "p.ORGID as tollstationProvince, " +
        "p.ORGNAME as tollstationName, " +
        "m.ORGID as roadId, " +
        "m.ORGNAME as roadName, " +
        "'江西省' as province " +
        "from " +
        "(select " +
        "distinct " +
        "orgid," +
        "orgname," +
        "BL_ROAD " +
        "from " +
        "dim.dim_tbl_orgcodedic " +
        "where " +
        "ORGTYPE='6' " +
        "and lastver in (select max(lastver) from dim.dim_tbl_orgcodedic)) p " +
        "LEFT JOIN " +
        "(select " +
        "distinct " +
        "orgid," +
        "orgname " +
        "from " +
        "dim.dim_tbl_orgcodedic " +
        "where " +
        "ORGTYPE='8' " +
        "and lastver in (select max(lastver) from dim.dim_tbl_orgcodedic)) m " +
        "on p.BL_ROAD=m.ORGID ")
    logger.info("广播收费站中间表")

    // Gantry intermediate table (base info plus a constant province column).
    val gantryFrame = sparkSession.sql(
      "select " +
        "id," +
        "gantryname," +
        "gantryid," +
        "gantrytype," +
        "'江西省' as province " +
        "from " +
        "dim.dim_tbl_gantrybaseinfo")
    logger.info("广播门架中间表")

    // Licence-plate → vehicle-type lookup table.
    val vehicleBasic = sparkSession.sql(
      "select " +
        "vehicleplate," +
        "vehicletype " +
        "from " +
        "dim.dim_tbl_vehiclebasicinfo"
    )
    logger.info("车牌-车辆类型码表")

    // Broadcast the lookup tables so downstream jobs can reuse them.
    // NOTE(review): broadcasting a DataFrame handle only ships the (serializable)
    // plan, not materialized rows — downstream `run` implementations depend on this
    // shape, so it is preserved here; confirm intent with the job implementations.
    val gantryFrameNewBroadCast: Broadcast[DataFrame] = sparkSession.sparkContext.broadcast(gantryFrame)
    val tollFrameBroadCast: Broadcast[DataFrame] = sparkSession.sparkContext.broadcast(tollFrame)
    val vehicleBasicBroadCast: Broadcast[DataFrame] = sparkSession.sparkContext.broadcast(vehicleBasic)

    // Dispatch to the requested job; unknown names terminate with exit code 1.
    logger.info("开始加工数据")
    appName match {
      case "GantryLicencePlate" => GantryLicencePlate.run(sparkSession, gantryFrameNewBroadCast, vehicleBasicBroadCast, year, month, day, hour)
      case "TollLicencePlate" => TollLicencePlate.run(sparkSession, tollFrameBroadCast, vehicleBasicBroadCast, year, month, day, hour)

      case _ =>
        logger.error("输入要运行的程序不存在,程序退出")
        System.exit(1)
    }
  }
}
