package com.github.dtkavin.bgq.qd

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
  * Function: source log data quality check (per-column non-empty stats and value dictionaries)
  * #Author: john
  * #Create: 2018-01-06 3:49 PM
  */
object OdsDetection {

  /**
    * Entry point. Reads a parquet dataset and, for every column that is not a
    * generated column (name contains "$") and not the raw payload column
    * (name contains "record__value"):
    *   - counts non-null / non-empty values and prints the count and percentage;
    *   - if the column has any non-empty value, writes a dictionary of its
    *     distinct values to `outpath/col=<column>` (bounded by dicNum).
    *
    * @param args args(0) = input parquet path,
    *             args(1) = output base path,
    *             args(2) = optional dictionary size (-1 means all distinct values; default 20)
    * @throws IllegalArgumentException if fewer than two arguments are supplied
    */
  def main(args: Array[String]): Unit = {
    if (args.length < 2) {
      // BUG FIX: typo "parasm" -> "params" in the usage message.
      throw new IllegalArgumentException("params:  inpath outpath [dicNum: -1 means all, default 20]")
    }

    val inpath = args(0)
    val outpath = args(1)
    // BUG FIX: was `args.length == 3`, which silently ignored dicNum whenever
    // extra trailing arguments were passed; `>= 3` honors it in that case too.
    val dicNum = if (args.length >= 3) args(2).toInt else 20

    val appName = "ods_detection"
    val spark = SparkSession.builder().appName(appName).getOrCreate()
    // Ensure the session is released even if a read/write fails mid-run.
    try {
      val inDF = spark.read.parquet(inpath)
      // Total number of log records (denominator for the percentage report).
      val totalCount = inDF.count()

      // Per-column non-empty statistics.
      inDF.columns
        .filterNot(_.contains("$"))
        .filterNot(_.contains("record__value"))
        .map { column =>
          val notEmptyCount =
            inDF.select(column).filter(s"${column} is not null and ${column} != ''").count()
          if (notEmptyCount > 0) {
            // Emit the column's value dictionary, truncated to dicNum rows unless -1.
            val distinctDF = inDF.select(column).distinct()
            val columnDF = if (dicNum == -1) distinctDF else distinctDF.limit(dicNum)
            columnDF.repartition(1).write.mode(SaveMode.Overwrite).parquet(s"${outpath}/col=${column}")
          }
          (column, notEmptyCount)
        }
        .foreach { case (column, count) =>
          // BUG FIX: original used `(count / totalCount) * 100.0` — Long/Long
          // integer division, so the percentage was always 0.0 or 100.0.
          // Also guard against an empty dataset (totalCount == 0 would yield NaN).
          val pct = if (totalCount == 0) 0.0 else count.toDouble / totalCount * 100
          println(s"==> ${column} : ${count} - ${pct} - ${count}/${totalCount}")
        }
      println(s"==> total_count : ${totalCount}")
    } finally {
      spark.stop()
    }
  }
}
