package com.yuanshi.cut

import com.yuanshi.repair.{Repair, RepairLast, RepairLast1}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/** Expected command-line arguments:
  *   args(0) = effect_time_start
  *   args(1) = effect_time_end
  *   args(2) = OrderType
  *   args(3) = BystagesNum
  *   args(4) = stage (one of "1+", "31+", "61+"; anything else means "91+")
  *   args(5) = RepayPeriod
  * (mob is fixed to 1 in the query below.)
  */
object Cut1Last {

  /** Entry point. Loads the repayment / vintage / cut tables, filters the cut
    * table down to a fixed slice, and invokes `RepairLast1.repair` once per
    * surviving row, passing the cut threshold column selected by `args(4)`.
    *
    * @param args 0=effect_time_start, 1=effect_time_end, 2=OrderType,
    *             3=BystagesNum, 4=stage ("1+"/"31+"/"61+"/other), 5=RepayPeriod
    */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.WARN)
    val spark: SparkSession = SparkSession.builder().getOrCreate()
    val mylog: Logger = Logger.getLogger("mylog")
    mylog.setLevel(Level.ALL)

    // 1.0 Load the required tables from the warehouse as DataFrames.
    val finalDF: DataFrame = spark.sql("select * from jindiao_v2.report_repay_final")
    mylog.error("finalDF的数据量为：" + finalDF.count())
    val tmp_vintageDF: DataFrame = spark.sql("select * from jindiao_v2.tmp_vintage")
    mylog.error("tmp_vintageDF的数据量为：" + tmp_vintageDF.count())
    val cutDF: DataFrame = spark.sql("select * from jindiao_v2.dws_order_cut_final")
    mylog.error("cutDF的数据量为：" + cutDF.count())

    // BUG FIX: the original called Dataset.foreach directly, whose closure runs
    // on the executors — but the closure references the SparkSession, finalDF
    // and tmp_vintageDF, none of which are serializable/usable executor-side
    // (this fails at runtime with Task-not-serializable / NPE). collect() first
    // so the loop runs on the driver; the where-clause reduces the data to a
    // handful of rows, so bringing them to the driver is safe.
    // NOTE(review): the filter hard-codes stage=='1+' while args(4) chooses
    // which cut column to read — confirm this asymmetry is intentional.
    cutDF.na.fill(0)
      .where("order_type==1 and bystagesnum==12 and mob==1 and m>='2019-05-1' and stage=='1+'")
      .collect()
      .foreach { row =>
        // Select the cut threshold matching the requested stage; anything
        // other than "1+"/"31+"/"61+" falls through to the 91+ column,
        // exactly as the original if/else chain did.
        val limitnum: Double = args(4) match {
          case "1+"  => row.getAs[Double]("cutnum1")
          case "31+" => row.getAs[Double]("cutnum31")
          case "61+" => row.getAs[Double]("cutnum61")
          case _     => row.getAs[Double]("cutnum91")
        }
        // Single repair call — the original duplicated this line in all four
        // branches with only `limitnum` differing.
        RepairLast1.repair(spark, tmp_vintageDF, finalDF, args(0), args(1), args(2), args(3), args(5), limitnum)
      }

    spark.close()
  }
}