package hbase

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.util.control.Breaks.break

/**
 * Spark driver that waits for a Hive table's data load to finish.
 *
 * "Ready" is defined as two consecutive row counts of the table being equal
 * and non-zero, i.e. the count has stopped growing. The job polls once per
 * minute and gives up after 30 minutes.
 */
object CheckDataReady {

  /** Pause between successive row-count checks, in milliseconds. */
  private val PollIntervalMs: Long = 1 * 60 * 1000L

  /** Maximum total time to keep polling before giving up, in milliseconds. */
  private val TimeoutMs: Long = 30 * 60 * 1000L

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .enableHiveSupport()
      .config(new SparkConf().setAppName("CheckDataReady"))
      .getOrCreate()
    println("初始化spark环境成功")

    try {
      // NOTE: df is a lazy query plan, not a snapshot — every df.count() below
      // re-executes the scan, so successive counts observe newly-loaded rows.
      val df: DataFrame = spark.sql("select * from dev_sztoc_audiencemanager.test8")
      var preCountNums: Long = df.count()
      val startCheckDataTime: Long = System.currentTimeMillis()
      var stable = false
      while (System.currentTimeMillis() - startCheckDataTime < TimeoutMs && !stable) {
        Thread.sleep(PollIntervalMs)
        val afterCountNums: Long = df.count()
        // Equal consecutive counts imply preCountNums > 0 too, so one
        // non-zero check is sufficient.
        if (afterCountNums == preCountNums && afterCountNums > 0) {
          println("校验成功，CountNums数量为" + afterCountNums)
          stable = true
        } else {
          println("校验失败，preCountNums数量为" + preCountNums + " afterCountNums数量为" + afterCountNums)
          preCountNums = afterCountNums
        }
      }
    } finally {
      // Always release the Spark/YARN resources, even if the poll loop throws.
      spark.stop()
    }
  }
}
