package com.yuanshi.repair

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import com.yuanshi.beans.{RepayFinalBean, TmpVintageBean}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import scala.util.control.NonFatal

object RepairLast1 {
  val mylog1: Logger = Logger.getLogger("mylog1")
  mylog1.setLevel(Level.ALL)

  /**
   * Randomly "repairs" a limited number of repayment-plan records so they count
   * as repaid on time (real_repay_time := plan_repay_time, repay_status = 1,
   * flag = 1), then appends the full, partially-repaired data set to the hive
   * table `jindiao_v2.report_repay_final_repair`.
   *
   * @param spark         active SparkSession
   * @param tmp_vintageDF vintage snapshot used to pick repair candidates
   * @param finalDF       full repayment-plan fact table
   * @param para0         inclusive lower bound on effect_time (string compare, e.g. "2016-05-01")
   * @param para1         upper bound on effect_time
   * @param para2         order_type eligible for repair
   * @param para3         bystages_num eligible for repair (numeric string)
   * @param para5         repay_period eligible for repair (numeric string)
   * @param limitnum      maximum number of orders to repair (truncated to Int for SQL LIMIT)
   */
  def repair(spark: SparkSession, tmp_vintageDF: DataFrame, finalDF: DataFrame, para0: String, para1: String, para2: String, para3: String, para5: String, limitnum: Double): Unit = {
    import spark.implicits._

    // 2.0 Wrap vintage rows into beans and collect to a driver-side List.
    val tmp_vintageDF1: DataFrame = tmp_vintageDF.na.fill(0)
    val tmp_vintageList: List[TmpVintageBean] = tmp_vintageDF1
      .map(row => {
        try {
          val order_code: String = row.getAs[String]("order_code")
          val effect_time: String = row.getAs[String]("effect_time")
          val order_type: String = row.getAs[String]("order_type")
          val bystages_num: Int = row.getAs[Int]("bystages_num")
          val mob: Int = row.getAs[Int]("mob")
          val inspect_time: String = row.getAs[String]("inspect_time")
          val max_late_days: Int = row.getAs[Int]("max_late_days")
          TmpVintageBean(order_code, effect_time, order_type, bystages_num, mob, inspect_time, max_late_days)
        } catch {
          // Rows with missing/mistyped columns are dropped below.
          // BUGFIX: was `case _ =>`, which also swallowed fatal Throwables.
          case NonFatal(_) => null
        }
      }).rdd.filter(_ != null)
      .filter(bean => bean.effect_time >= para0 && bean.effect_time <= para1)
      .collect().toList
    mylog1.error("开始修复，tmp_vintageList长度：" + tmp_vintageList.length)

    val finalDF1: DataFrame = finalDF.na.fill(0)

    // Collect the full repayment-plan table to the driver as beans.
    // NOTE(review): this assumes finalDF fits in driver memory — confirm.
    val finalList: List[RepayFinalBean] = finalDF1.map { row => {
      try {
        val bs_order_id: Int = row.getAs[Int]("bs_order_id")
        val user_id: Int = row.getAs[Int]("user_id")
        val order_code: String = row.getAs[String]("order_code")
        val effect_time: String = row.getAs[String]("effect_time")
        val order_type: String = row.getAs[String]("order_type")
        val bystages_num: Int = row.getAs[Int]("bystages_num")
        val plan_id: Int = row.getAs[Int]("plan_id")
        val repay_period: Int = row.getAs[Int]("repay_period")
        val plan_repay_time: String = row.getAs[String]("plan_repay_time")
        val real_repay_time: String = row.getAs[String]("real_repay_time")
        val capital: Double = row.getAs[Double]("capital")
        val fee: Double = row.getAs[Double]("fee")
        val service: Double = row.getAs[Double]("service")
        val repay_status: Int = row.getAs[Int]("repay_status")
        val flag: Int = row.getAs[Int]("flag")
        val late_days: String = row.getAs[String]("late_days")
        val late_perio: String = row.getAs[String]("late_perio")
        RepayFinalBean(bs_order_id, user_id, order_code, effect_time, order_type, bystages_num, plan_id, repay_period, plan_repay_time,
          real_repay_time, capital, fee, service, repay_status, flag, late_days, late_perio)
      } catch {
        // BUGFIX: was `case _ =>`; only non-fatal parse failures are dropped.
        case NonFatal(_) => null
      }
    }
    }.rdd.filter(_ != null).collect().toList

    // 3.0 Select the orders that are eligible for repair.
    // BUGFIX: pattern was "HH:mm:SS" — `SS` is milliseconds; `ss` is seconds.
    val sf: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    val cal = Calendar.getInstance()

    // A plan is a candidate when it was never repaid, or matches the requested
    // type/period/date window and was repaid within 1 day of the effect date.
    // NOTE(review): the original comment said "effect time + mob months" but the
    // code adds one DAY — confirm which is intended.
    // BUGFIX (vs original):
    //  - `repay_period == para5` compared Int to String (always false) -> para5.toInt;
    //  - `flag == null` compared Int to null (always false) -> flag == 0 (na.fill(0) sentinel);
    //  - real_repay_time was parsed BEFORE its null check, so the null branch
    //    could never be reached (NPE first) — parsing is now short-circuited;
    //  - the inner loop over tmp_vintageList ignored its element and only
    //    multiplied each candidate |tmp_vintageList| times (deduped later by
    //    distinct()) — removed; result set is unchanged;
    //  - outer map produced List[List[...]] whose never-null inner lists were
    //    filtered instead of the null elements, and toDF with 4 column names
    //    cannot apply to nested lists — now a flat element-level filter.
    val res1: DataFrame = finalList.map(bean2 => {
      val candidate =
        bean2.real_repay_time == null || (
          bean2.bystages_num == para3.toInt && bean2.order_type == para2 &&
            bean2.repay_period == para5.toInt &&
            bean2.flag == 0 &&
            bean2.effect_time >= para0 && bean2.effect_time < para1 && {
              val realRepay: Date = sf.parse(bean2.real_repay_time)
              cal.setTime(sf.parse(bean2.effect_time))
              cal.add(Calendar.DAY_OF_MONTH, 1)
              realRepay.before(cal.getTime)
            })
      if (candidate) (bean2.order_code, bean2.bs_order_id, bean2.user_id, bean2.repay_period)
      else null
    }).filter(_ != null).toDF("order_code", "bs_order_id", "user_id", "repay_period")

    // createOrReplace: safe if repair() runs more than once in one session.
    res1.createOrReplaceTempView("v_res1")

    // Randomly sample at most `limitnum` distinct candidate plans.
    // BUGFIX: interpolating the Double directly produced e.g. "limit 100.0",
    // which is not a valid SQL LIMIT literal.
    val res2List = spark.sql(s"select * from v_res1 order by rand() limit ${limitnum.toInt}")
      .rdd.distinct().collect().toList

    // 4.0 Apply the repair and write to hive.
    // BUGFIX: the original cartesian flatMap over res2List emitted every final
    // row once per sampled order (row multiplication). A key Set gives one
    // output row per input row and O(n + m) instead of O(n * m).
    val repairKeys: Set[(String, Int, Int, Int)] = res2List.map(t =>
      (t.getAs[String]("order_code"), t.getAs[Int]("bs_order_id"), t.getAs[Int]("user_id"), t.getAs[Int]("repay_period"))
    ).toSet

    val res: DataFrame = finalList.map(bean2 => {
      if (repairKeys((bean2.order_code, bean2.bs_order_id, bean2.user_id, bean2.repay_period))) {
        // Repaired: pretend the plan was repaid exactly on the planned date.
        (bean2.bs_order_id, bean2.user_id, bean2.order_code, bean2.effect_time, bean2.order_type, bean2.bystages_num, bean2.plan_id, bean2.repay_period, bean2.plan_repay_time,
          bean2.plan_repay_time, bean2.capital, bean2.fee, bean2.service, 1, 1, bean2.late_days, bean2.late_period)
      } else {
        // Untouched: pass the record through unchanged.
        (bean2.bs_order_id, bean2.user_id, bean2.order_code, bean2.effect_time, bean2.order_type, bean2.bystages_num, bean2.plan_id, bean2.repay_period, bean2.plan_repay_time,
          bean2.real_repay_time, bean2.capital, bean2.fee, bean2.service, bean2.repay_status, bean2.flag, bean2.late_days, bean2.late_period)
      }
    }).toDF("bs_order_id", "user_id", "order_code", "effect_time", "order_type", "bystages_num", "plan_id", "repay_period", "plan_repay_time", "real_repay_time", "capital", "fee", "service", "repay_status", "flag", "late_days", "late_perio")

    mylog1.error("修复结束,结果数据数量：" + res.count())
    // Write the result to hive.
    res.createOrReplaceTempView("v_res")
    spark.sql("insert into jindiao_v2.report_repay_final_repair select * from v_res")
  }
}