package com.ctbri.manage.compute.scala

import com.ctbri.manage.compute.scala.calculate.{IdentityChecker, TemplateBaseChecker}
import org.apache.spark.sql.SparkSession
import org.slf4j.{Logger, LoggerFactory}

/**
 * Entry point dispatching data-management Spark jobs by command-line argument.
 *
 * @author wangxuem
 * @since 2023/3/6 18:16
 */
object Compute {
  protected final val logger: Logger = LoggerFactory.getLogger(this.getClass)

  // Expected total argument count (command included) for each supported command.
  // Single source of truth: the validation and the dispatch below both key off
  // this map, so adding a command means one new entry plus one dispatch case
  // (previously the same information was duplicated across two match blocks).
  private val expectedArgCounts: Map[String, Int] = Map(
    "import_data"      -> 5,
    "export_diff"      -> 2,
    "import_data_plus" -> 3,
    "static_province"  -> 2,
    "template_check"   -> 3
  )

  /**
   * Job launcher: builds a local SparkSession, validates the CLI arguments,
   * then runs the checker routine selected by `args(0)`.
   *
   * Exits with status 1 when no arguments are given or the argument count
   * does not match the chosen command; an unknown command is only logged
   * (matching the original behavior) and falls through to session shutdown.
   */
  def main(args: Array[String]): Unit = {
    logger.warn("IDCComputer begins to run...................")
    // NOTE(review): master is hard-coded to local[4]; for cluster deployment
    // this should come from spark-submit instead — confirm intended usage.
    val spark = SparkSession
      .builder()
      .master("local[4]")
      .enableHiveSupport()
      .appName("data-manage")
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    if (args.isEmpty) {
      logger.warn("对不起，参数个数错误！")
      System.exit(1)
    }

    // Validate the argument count for the requested command.
    expectedArgCounts.get(args(0)) match {
      case Some(expected) if args.length != expected =>
        logger.warn("参数个数不正确")
        System.exit(1)
      case None =>
        // Unknown command: log and continue, as the original code did.
        logger.warn("对不起，第一个参数输入错误！")
      case _ => // argument count is correct — proceed to dispatch
    }

    // Dispatch to the checker routine for the requested command.
    args(0) match {
      case "import_data"      => IdentityChecker.uploadToHive(spark, args(1), args(2), args(3), args(4))
      case "import_data_plus" => IdentityChecker.uploadToHivePlus(spark, args(1), args(2))
      case "export_diff"      => IdentityChecker.exportDataDiff(spark, args(1))
      case "static_province"  => IdentityChecker.staticProvince(spark, args(1))
      case "template_check"   => TemplateBaseChecker.check(spark, args(1), args(2))
      case _                  => logger.warn("程序未执行")
    }

    spark.close()
    logger.warn("IDCComputer finish!")
  }

}
