package com.huitian.net


import com.google.inject.{Guice, Inject}
import com.huitian.net.Hive2MysqlApp.Params
import com.huitian.net.core.Adapter
import com.huitian.net.impl.EnAdapterImpl
import com.huitian.net.module.MainModule
import com.huitian.net.pipeline.{EnSource, GxSparkSession}
import com.typesafe.config.ConfigFactory
import org.apache.log4j.Logger
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import scopt.OptionParser

object Hive2MysqlApp {

  /**
   * Command-line option parser.
   *
   * Options: -d/--day, -l/--lastday, -c/--current_year, -p/--last_year,
   * -n/--numPartitions, -t/--table. All are optional; missing values fall
   * back to the defaults declared on [[Params]].
   */
  val parser: OptionParser[Params] = new OptionParser[Params]("Hive2MysqlApp") {
    head("Hive2MysqlApp")

    opt[String]('d', "day")
      .text("which day")
      .action((x, c) => c.copy(day = x))

    opt[String]('l', "lastday")
      .text("which lastday")
      .action((x, c) => c.copy(lastday = x))

    opt[String]('c', "current_year")
      .text("which current_year") // fixed: help text previously said "which day" (copy-paste)
      .action((x, c) => c.copy(current_year = x))

    opt[String]('p', "last_year")
      .text("which last_year") // fixed: help text previously said "which lastday" (copy-paste)
      .action((x, c) => c.copy(last_year = x))

    opt[Int]('n', "numPartitions")
      .text("number of partitions")
      .action((x, c) => c.copy(numPartitions = x))

    opt[String]('t', "table")
      .text("which table")
      .action((x, c) => c.copy(table = x))

    help("help").text("prints this usage text")
  }

  /**
   * Explicit entry point. Replaces `extends App`: the `App` trait's
   * `DelayedInit` semantics make field-initialization order fragile when
   * mixed with frameworks such as Guice and Spark, so an explicit `main`
   * is safer and equivalent for spark-submit.
   *
   * @param args raw command-line arguments, parsed by [[parser]]
   */
  def main(args: Array[String]): Unit = {
    parser.parse(args, Params()) match {
      case Some(params) =>
        val injector = Guice.createInjector(MainModule)
        val runner = injector.getInstance(classOf[Hive2MysqlApp])
        ConfigFactory.invalidateCaches()
        runner.run(params)
      case _ =>
        // Parsing failed; scopt has already printed usage. Exit non-zero.
        sys.exit(1)
    }
  }

  /**
   * Job parameters populated from the command line.
   *
   * @param day           processing day (format assumed yyyy-MM-dd — TODO confirm)
   * @param lastday       previous processing day
   * @param current_year  current year, used by ODS-layer extraction branches
   * @param last_year     previous year, used by ODS-layer extraction branches
   * @param numPartitions number of partitions for reads/writes (default 3)
   * @param table         name of the table/branch to process
   */
  case class Params(
      day: String = "",
      lastday: String = "",
      current_year: String = "",
      last_year: String = "",
      numPartitions: Int = 3,
      table: String = "")

}


class Hive2MysqlApp @Inject()(
                                gxSparkSession: GxSparkSession[SparkSession],
                                enSource: EnSource[DataFrame],
                                enAdapter: Adapter
                              ) extends Serializable {


  /**
   * Creates and runs the job: dispatches on `params.table` to read the
   * matching dataset from Hive via the injected [[Adapter]] and write it
   * to MySQL. The Spark session is always stopped on exit, even when a
   * branch throws (previously a failure leaked the session).
   *
   * NOTE(review): `gxSparkSession` and `enSource` are injected but never
   * used here — the session comes from `EnAdapterImpl.session()` instead.
   * Confirm whether the injected session was meant to be used.
   *
   * @param params parsed command-line parameters (dates, partitions, table)
   */
  private def createNewJob(params: Params): Unit = {

    // Logging component.
    val log: Logger = Logger.getRootLogger
    // Initialize Spark (static session from the adapter implementation).
    val spark: SparkSession = EnAdapterImpl.session()

    // Unpack parameters: processing dates, year bounds, partitioning, target table.
    val day: String = params.day
    val lastday: String = params.lastday
    val current_year: String = params.current_year
    val last_year: String = params.last_year
    val numPartitions: Int = params.numPartitions
    val table: String = params.table

    // Log the effective parameters before touching any data.
    log.info("读取hive中的数据：" + day + ",     lastday==: " + lastday)
    log.info("current_year：" + current_year)
    log.info("last_year：" + last_year)
    log.info("table为： " + table)

    try {
      // Dispatch on the requested table. Each simple branch follows the same
      // shape: fetch from Hive, preview (show/printSchema), write to MySQL.
      table match {
        case "dwd_delivery_nc" =>
          log.info("当前处理的数据为：dwd_delivery_nc")
          val ds = enAdapter.get_dwd_delivery_nc(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dwd_delivery_nc(spark, day, lastday, ds, numPartitions, log)

        case "dws_delivery_in_salor_nc_d" =>
          log.info("当前处理的数据为：dws_delivery_in_salor_nc_d")
          val ds = enAdapter.get_dws_delivery_in_salor_nc_d(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_delivery_in_salor_nc_d(spark, day, lastday, ds, numPartitions, log)

        case "dws_delivery_in_salor_nc_w" =>
          log.info("当前处理的数据为：dws_delivery_in_salor_nc_w")
          val ds = enAdapter.get_dws_delivery_in_salor_nc_w(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_delivery_in_salor_nc_w(spark, day, lastday, ds, numPartitions, log)

        // Composite pipeline: refreshes dwd_delivery_nc plus the daily,
        // monthly and weekly dws delivery tables in sequence.
        case "dws_delivery_in_salor_nc_w_2" =>
          // fixed: log previously said "dws_delivery_in_salor_nc_w" (copy-paste)
          log.info("当前处理的数据为：dws_delivery_in_salor_nc_w_2")

          log.info("数据插入到： dwd_delivery_nc")
          // Extracts from the ODS layer, hence the year parameters.
          log.info("数据写入 hive 中的 dwd_delivery_nc")
          enAdapter.get_dwd_delivery_nc_hive(spark, current_year, last_year, numPartitions, log)
          log.info(" dwd_delivery_nc 数据写入到mysql")
          // Extracts from the DWD layer, hence the day parameters.
          val dwdds = enAdapter.get_dwd_delivery_nc(spark, day, lastday, numPartitions, log)
          enAdapter.write2dwd_delivery_nc(spark, day, lastday, dwdds, numPartitions, log)
          log.info("========================================================================")
          log.info("========================================================================")
          log.info("========================================================================")

          log.info("当前处理的数据为：dws_delivery_in_salor_nc_d")
          log.info("数据写入 hive 中的 dws_delivery_in_salor_nc_d")
          enAdapter.dws_delivery_in_salor_nc_d_hive(spark, day, lastday, numPartitions, log)
          log.info("dws_delivery_in_salor_nc_d 的数据写入到mysql")
          val dws_d_ds = enAdapter.get_dws_delivery_in_salor_nc_d(spark, day, lastday, numPartitions, log)
          enAdapter.write2dws_delivery_in_salor_nc_d(spark, day, lastday, dws_d_ds, numPartitions, log)
          log.info("========================================================================")
          log.info("========================================================================")
          log.info("========================================================================")

          log.info("当前处理的数据为：dws_delivery_in_salor_nc_m")
          log.info("数据写入 hive 中的 dws_delivery_in_salor_nc_m")
          enAdapter.dws_delivery_in_salor_nc_m_hive(spark, day, lastday, numPartitions, log)
          val dws_m_ds = enAdapter.get_dws_delivery_in_salor_nc_m(spark, day, lastday, numPartitions, log)
          enAdapter.write2dws_delivery_in_salor_nc_m(spark, day, lastday, dws_m_ds, numPartitions, log)
          log.info("========================================================================")
          log.info("========================================================================")
          log.info("========================================================================")

          log.info("生成dws_delivery_in_salor_nc_w表数据。。。。")
          // Extracts from the DWD layer, hence the day parameters.
          val ds = enAdapter.get_dws_delivery_in_salor_nc_w(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_delivery_in_salor_nc_w(spark, day, lastday, ds, numPartitions, log)

        // Composite pipeline: refreshes dwd_saleinvoice_nc plus the daily
        // and monthly dws sale-invoice tables in sequence.
        case "dws_saleinvoice_in_salor_nc_m_day_by_day" =>
          log.info("当前处理的数据为：dwd_saleinvoice_nc")
          log.info("数据插入到： dwd_saleinvoice_nc")
          // Extracts from the ODS layer, hence the year parameters.
          log.info("数据写入 hive 中的 dwd_saleinvoice_nc")
          enAdapter.get_dwd_saleinvoice_nc_hive(spark, current_year, last_year, numPartitions, log)
          log.info("dwd_saleinvoice_nc 数据写入到mysql。。。。。。。")
          val ds = enAdapter.get_dwd_saleinvoice_nc(spark, day, lastday, numPartitions, log)
          enAdapter.write2dwd_saleinvoice_nc(spark, day, lastday, ds, numPartitions, log)

          log.info("========================================================================")
          log.info("========================================================================")
          log.info("========================================================================")
          log.info("数据写入 hive 中的 dws_saleinvoice_in_salor_nc_d")
          enAdapter.get_dws_saleinvoice_in_salor_nc_d_hive(spark, day, lastday, numPartitions, log)
          log.info("dws_saleinvoice_in_salor_nc_d 数据写入到mysql")
          val dws_d_ds = enAdapter.get_dws_saleinvoice_in_salor_nc_d(spark, day, lastday, numPartitions, log)
          enAdapter.write2dws_saleinvoice_in_salor_nc_d(spark, day, lastday, dws_d_ds, numPartitions, log)

          log.info("========================================================================")
          log.info("========================================================================")
          log.info("========================================================================")

          log.info("数据写入 hive 中的 dws_saleinvoice_in_salor_nc_m")
          enAdapter.get_dws_saleinvoice_in_salor_nc_m_hive(spark, day, lastday, numPartitions, log)
          log.info("dws_saleinvoice_in_salor_nc_m 数据写入到mysql")
          val dws_m_ds = enAdapter.get_dws_saleinvoice_in_salor_nc_m(spark, day, lastday, numPartitions, log)
          enAdapter.write2dws_saleinvoice_in_salor_nc_m(spark, day, lastday, dws_m_ds, numPartitions, log)
          log.info("========================================================================")
          log.info("========================================================================")
          log.info("========================================================================")

        case "dws_delivery_in_salor_nc_m" =>
          log.info("当前处理的数据为：dws_delivery_in_salor_nc_m")
          val ds = enAdapter.get_dws_delivery_in_salor_nc_m(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_delivery_in_salor_nc_m(spark, day, lastday, ds, numPartitions, log)

        case "dws_delivery_in_industry_nc_d" =>
          log.info("当前处理的数据为：dws_delivery_in_industry_nc_d")
          val ds = enAdapter.get_dws_delivery_in_industry_nc_d(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_delivery_in_industry_nc_d(spark, day, lastday, ds, numPartitions, log)

        case "dws_delivery_in_industry_nc_m" =>
          log.info("当前处理的数据为：dws_delivery_in_industry_nc_m")
          val ds = enAdapter.get_dws_delivery_in_industry_nc_m(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_delivery_in_industry_nc_m(spark, day, lastday, ds, numPartitions, log)

        case "dwd_saleinvoice_nc" =>
          log.info("当前处理的数据为：dwd_saleinvoice_nc")
          val ds = enAdapter.get_dwd_saleinvoice_nc(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dwd_saleinvoice_nc(spark, day, lastday, ds, numPartitions, log)

        case "dws_saleinvoice_in_salor_nc_d" =>
          log.info("当前处理的数据为：dws_saleinvoice_in_salor_nc_d")
          val ds = enAdapter.get_dws_saleinvoice_in_salor_nc_d(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_saleinvoice_in_salor_nc_d(spark, day, lastday, ds, numPartitions, log)

        case "dws_saleinvoice_in_salor_nc_m" =>
          log.info("当前处理的数据为：dws_saleinvoice_in_salor_nc_m")
          val ds = enAdapter.get_dws_saleinvoice_in_salor_nc_m(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_saleinvoice_in_salor_nc_m(spark, day, lastday, ds, numPartitions, log)

        case "dws_saleinvoice_in_industry_nc_d" =>
          log.info("当前处理的数据为：dws_saleinvoice_in_industry_nc_d")
          val ds = enAdapter.get_dws_saleinvoice_in_industry_nc_d(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_saleinvoice_in_industry_nc_d(spark, day, lastday, ds, numPartitions, log)

        case "dws_saleinvoice_in_industry_nc_m" =>
          log.info("当前处理的数据为：dws_saleinvoice_in_industry_nc_m")
          val ds = enAdapter.get_dws_saleinvoice_in_industry_nc_m(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_saleinvoice_in_industry_nc_m(spark, day, lastday, ds, numPartitions, log)

        //      case "dwd_saleinvoice_custome_industry_nc_d" =>
        //        log.info("当前处理的数据为：dwd_saleinvoice_custome_industry_nc_d")
        //        val ds = enAdapter.get_dwd_saleinvoice_custome_industry_nc_d(spark, day,lastday, numPartitions, log)
        //        ds.show(3)
        //        ds.printSchema()
        //        enAdapter.write2dwd_saleinvoice_custome_industry_nc_d(spark,day,lastday,ds,numPartitions,log);

        case "dws_saleinvoice_in_customer_nc_d" =>
          log.info("当前处理的数据为：dws_saleinvoice_in_customer_nc_d")
          val ds = enAdapter.get_dws_saleinvoice_in_customer_nc_d(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_saleinvoice_in_customer_nc_d(spark, day, lastday, ds, numPartitions, log)

        case "dws_saleinvoice_in_customer_nc_m" =>
          log.info("当前处理的数据为：dws_saleinvoice_in_customer_nc_m")
          val ds = enAdapter.get_dws_saleinvoice_in_customer_nc_m(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_saleinvoice_in_customer_nc_m(spark, day, lastday, ds, numPartitions, log)

        case "dws_tab_fhinfo_hb_nc_m" =>
          log.info("当前处理的数据为：dws_tab_fhinfo_hb_nc_m")
          val ds = enAdapter.get_dws_tab_fhinfo_hb_nc_m(spark, day, lastday, numPartitions, log)
          ds.show(3)
          ds.printSchema()
          enAdapter.write2dws_tab_fhinfo_hb_nc_m(spark, day, lastday, ds, numPartitions, log)

        case _ =>
          // Unknown table name: log and fall through to cleanup.
          log.error(s"未处理的表名: $table")
      }
    } finally {
      // Always release the Spark session, even when a branch throws.
      spark.stop()
    }

  }


  /**
   * Public entry point invoked by the launcher object after DI wiring.
   *
   * @param params parsed command-line parameters
   */
  def run(params: Params): Unit = {
    createNewJob(params)
  }

}
