package com.saic.count

import java.util.Date
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.hive.HiveContext
import org.json.JSONObject
import com.saic.utils.HttpSaicUtils
import java.text.SimpleDateFormat


/**
 * Computes per-column statistics for a Hive table — total / distinct /
 * null / empty-string counts, min/max value and min/max string length —
 * and posts the results as JSON documents to monthly Elasticsearch-style
 * indices over HTTP.
 *
 * Usage: TableStaticCount &lt;db&gt; &lt;table&gt; [&lt;where-clause&gt;]
 *
 * @author ZhiLi
 */
object TableStaticCount {
  def main(args: Array[String]): Unit = {

    // Build the scan query from the CLI arguments; bail out with a usage
    // message on a wrong argument count (was a bare, removed `exit()`).
    val sql = args.length match {
      case 2 => s"select * from ${args(1)}"
      case 3 => s"select * from ${args(1)} where ${args(2)}"
      case _ =>
        System.err.println("Usage: TableStaticCount <db> <table> [<where-clause>]")
        sys.exit(1)
    }
    println("Table SQL, " + sql)

    val sparkConf = new SparkConf().setAppName(args(0) + "." + args(1))
    val sc = new SparkContext(sparkConf)
    val hiveContext = new HiveContext(sc)
    hiveContext.sql("use " + args(0))
    val tableDF = hiveContext.sql(sql)
    val tableCount = tableDF.count()
    val schema = tableDF.schema

    // "-yyyyMM" yields the monthly index-name suffix, e.g. "-201601".
    val indexDateFormat = new SimpleDateFormat("-yyyyMM")
    val yyyymm = indexDateFormat.format(new Date)
    val date = System.currentTimeMillis()

    val detailsUrl = "http://10.32.47.108:9200/table-count" + yyyymm + "/details"
    val basicUrl = "http://10.32.47.108:9200/table-count" + yyyymm + "/basic"

    tableDF.registerTempTable("data_table_for_spark")
    schema.foreach { field =>
      val colName = field.name
      val colType = field.dataType.typeName

      val colDF = tableDF.select(colName)
      val distinctCount = colDF.distinct().count()
      // BUG FIX: "col = null" never matches in SQL (NULL comparisons yield
      // NULL); "is null" is required, otherwise nullCount is always 0 and
      // notNullCount is inflated.
      val nullCount = colDF.filter(colName + " is null").count()
      val nullStringCount = colDF.filter(colName + "=''").count()
      val notNullCount = tableCount - nullCount - nullStringCount

      // describe() yields summary rows whose column 0 is the statistic name
      // ("max", "min", ...) and column 1 its value for this column.
      var maxValue = ""
      var minValue = ""
      colDF.describe(colName).foreach { row =>
        if (row.apply(0).toString.equals("max")) {
          maxValue = row.apply(1) + ""
        } else if (row.apply(0).toString.equals("min")) {
          minValue = row.apply(1) + ""
        }
      }

      // Guard: max()/min() on an empty RDD throw, so report 0 for an empty
      // table. NOTE(review): null cells stringify to "null" (length 4) —
      // confirm that is the intended length semantics.
      val lengths = colDF.map { row => (row.apply(0) + "").length }
      val maxLength = if (tableCount > 0) lengths.max() else 0
      val minLength = if (tableCount > 0) lengths.min() else 0

      // NOTE(review): JSONObject.append wraps every value in a JSON array
      // ({"db_name": ["x"]}); if scalar fields are expected downstream,
      // put() was probably intended. Kept as append to preserve the wire
      // format existing indices already hold.
      val detailsJson = new JSONObject()
      detailsJson.append("db_name", args(0))
      detailsJson.append("table_name", args(1))
      detailsJson.append("column_name", colName)
      detailsJson.append("column_type", colType)
      detailsJson.append("timestamp", date)
      detailsJson.append("row_count", tableCount)
      detailsJson.append("row_discount", distinctCount)
      detailsJson.append("null_count", nullCount)
      detailsJson.append("null_stringcount", nullStringCount)
      detailsJson.append("notnull_count", notNullCount)
      detailsJson.append("max_value", maxValue)
      detailsJson.append("min_value", minValue)
      detailsJson.append("max_length", maxLength)
      // BUG FIX: was `minValue` (the min value string); the computed
      // minLength belongs here.
      detailsJson.append("min_length", minLength)

      // One "basic" document per count flavour; the shared identifying
      // fields are built by this local helper instead of being repeated
      // three times.
      def basicJson(stringType: String, rowCount: Long): JSONObject = {
        val json = new JSONObject()
        json.append("db_name", args(0))
        json.append("table_name", args(1))
        json.append("column_name", colName)
        json.append("column_type", colType)
        json.append("timestamp", date)
        json.append("string_type", stringType)
        json.append("row_count", rowCount)
        json
      }

      HttpSaicUtils.doPost(basicUrl, basicJson("not_null_count", notNullCount))
      HttpSaicUtils.doPost(basicUrl, basicJson("null_count", nullCount))
      HttpSaicUtils.doPost(basicUrl, basicJson("null_string_count", nullStringCount))

      HttpSaicUtils.doPost(detailsUrl, detailsJson)
    }

    hiveContext.dropTempTable("data_table_for_spark")
    sc.stop()
  }
}