package com.yuanshi.cut

import com.yuanshi.repair.Repair
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}

/** Expected command-line arguments (by position):
  *   args(0) = effect_time_start
  *   args(1) = effect_time_end
  *   args(2) = OrderType
  *   args(3) = BystagesNum
  *   args(4) = stage
  *   args(5) = RepayPeriod (parsed as Int downstream)
  * mob is fixed at 1 for this job.
  */
object Cut1 {

  /** Entry point: loads the repay/vintage/cut tables from HDFS, looks up the
    * cut row matching the CLI arguments, and runs the Repair step once with
    * the selected threshold.
    */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.ERROR)
    val spark: SparkSession = SparkSession.builder().getOrCreate()

    // 1.0 Load the required tables from HDFS and convert them to DataFrames.
    // NOTE(review): spark.read.textFile yields a single "value" column of raw
    // lines; calling toDF with many column names on it looks wrong unless the
    // lines are split upstream — confirm the intended parsing of these files.
    val finalDF: DataFrame = spark.read.textFile("hdfs://cm-master:9000/opt/hive/warehouse/jindiao_v2/report_repay_final")
      .toDF("bs_order_id", "user_id", "order_code", "effect_time", "order_type", "bystages_num", "plan_id", "repay_period",
        "plan_repay_time", "real_repay_time", "capital", "fee", "service", "repay_status", "flag", "late_days", "late_period")

    val tmp_vintageDF: DataFrame = spark.read.textFile("hdfs://cm-master:9000/opt/hive/warehouse/jindiao_v2/tmp_vintage")
      .toDF("order_code", "effect_time", "order_type", "bystages_num", "mob", "inspect_time", "max_late_days")

    val cutDF: DataFrame = spark.read.textFile("hdfs://cm-master:9000/opt/hive/warehouse/jindiao_v2/dws_order_cut")
      .toDF("order_type", "bystagesnum", "mob", "m", "stage", "cutnum1", "cutnum31", "cutnum61", "cutnum91")

    // BUG FIX: the original predicate embedded the literal text "args(2)" etc.
    // inside the SQL string, so the columns were compared against those literal
    // strings instead of the runtime argument values. Interpolate the actual
    // values (quoted, since they originate from string arguments).
    val matchedCuts = cutDF.where(
      s"order_type=='${args(2)}' and bystagesnum=='${args(3)}' and mob==1 and m=='${args(0)}' and stage=='${args(4)}'")

    // BUG FIX: the original used a lazy `.rdd.map` with no action, so the body
    // never executed; it also mutated the driver-side var from executor code
    // (changes are lost on the driver) and captured the non-serializable
    // SparkSession inside the closure. The filtered result is small, so
    // collect it to the driver and process it there.
    var limitnum: Double = 0.0
    matchedCuts.collect().foreach { row =>
      // BUG FIX: "stage" was read as Double and compared with .equals against
      // String literals ("1+", "31+", ...), which is always false — only the
      // cutnum91 branch could ever run. Read it as String and match on it.
      val stage: String = row.getAs[String]("stage")
      limitnum = stage match {
        case "1+"  => row.getAs[Double]("cutnum1")
        case "31+" => row.getAs[Double]("cutnum31")
        case "61+" => row.getAs[Double]("cutnum61")
        case _     => row.getAs[Double]("cutnum91") // default: 91+ bucket
      }
      // The repair call was byte-identical in all four original branches;
      // invoke it once after selecting the threshold.
      Repair.repair(spark, tmp_vintageDF, finalDF, args(0), args(1), args(2), args(3), args(5))
    }
    spark.close()
  }
}
