package com.saic.count

import java.util
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.hive.HiveContext

/**
  * Created by ThinkPad on 2017/3/21.
  */
object AsicFromHiveToElsql {

  /**
    * Profiles every column of a Hive table — data type, min/max value, distinct
    * ratio, null/blank counts, min/max string length — and prints the collected
    * metrics. Metrics are held in java.util.HashMap instances so they can later
    * be pushed to Elasticsearch (see the commented-out ElsearchUtils calls).
    *
    * args(0) = Hive database name
    * args(1) = table name
    * args(2) = optional WHERE clause; when present it replaces the LIMIT sample
    */
  def main(args: Array[String]): Unit = {
    require(args.length >= 2, "usage: AsicFromHiveToElsql <database> <table> [whereClause]")

    // Metric maps, keyed by column name, destined for Elasticsearch.
    val mapColnameAndType = new util.HashMap[String, String]()  // column name -> type name
    val mapColnumValueMax = new util.HashMap[String, Integer]() // max value per column
    val mapColumValueMin  = new util.HashMap[String, Integer]() // min value per column
    val mapfenbu          = new util.HashMap[String, Integer]() // distinct-value ratio, percent 0-100
    val mapNullRate       = new util.HashMap[String, Integer]() // null ratio, percent 0-100
    val mapNotNullCount   = new util.HashMap[String, Integer]() // non-null value count
    val mapColMaxLength   = new util.HashMap[String, Integer]() // max string length
    val mapColMinlLength  = new util.HashMap[String, Integer]() // min string length
    val mapNullCount      = new util.HashMap[String, Integer]() // null value count
    val mapBlankCount     = new util.HashMap[String, Integer]() // empty-string ("") count

    //EL cluster configuration
 /*   val ip="10.32.47.108,10.32.47.109,10.32.47.110,10.32.47.111"
    ElsearchUtils.setIP(ip)
    ElsearchUtils.setCluster("node-1", "true")
    ElsearchUtils.init()
    val client = ElsearchUtils.getClient
*/
    // Spark / Hive setup. Only one HiveContext is needed (the original built two,
    // one of which was never used).
    val conf = new SparkConf().setAppName(args(0) + "." + args(1))
    val sc: SparkContext = new SparkContext(conf)
    val hiveContext = new HiveContext(sc)
    hiveContext.sql("use " + args(0))

    // BUG FIX: the original re-declared `val tableDF` inside the `if`, shadowing
    // the outer val, so the optional WHERE clause was computed and discarded.
    // Build the query string once, then run it.
    val query =
      if (args.length >= 3) s"select * from ${args(1)} where ${args(2)}"
      else s"select * from ${args(1)} limit 100000"
    val tableDF = hiveContext.sql(query)

    tableDF.registerTempTable("data_table_for_spark")
    hiveContext.cacheTable("data_table_for_spark") // cached: every column is scanned below

    val columns: Array[String] = tableDF.columns
    // foreach, not map: we only want the side effect of filling the map.
    tableDF.schema.foreach(f => mapColnameAndType.put(f.name, f.dataType.typeName))
//    ElsearchUtils.addelemtsStr(client,mapColnameAndType,"test","test")// push metric 1 to EL
    println(mapColnameAndType)

    val rowCount: Long = tableDF.count() // total record count

    // Best-effort conversion of an aggregate result to Integer; null when the
    // value is absent or not numeric. BUG FIX: the original called .toString on
    // an Array[Row] (take(1)), which can never parse as an Int.
    def asInteger(v: Any): Integer =
      if (v == null) null
      else scala.util.Try(Integer.valueOf(v.toString.trim.toDouble.toInt)).getOrElse(null)

    // Ratio as a percentage 0-100. BUG FIX: the original used Long integer
    // division (part / total), which truncates every ratio below 1 to 0.
    def percent(part: Long): Integer =
      if (rowCount == 0L) Integer.valueOf(0)
      else Integer.valueOf((part * 100L / rowCount).toInt)

    columns.foreach { columName =>
      // One aggregation pass per column instead of ~9 separate Spark jobs.
      // max(length)/min(length) also replaces the original full sorts, whose
      // ascending/descending directions were swapped (max got the min length).
      val stats = hiveContext.sql(
        s"""select max($columName), min($columName),
           |       count(distinct $columName), count($columName),
           |       max(length($columName)), min(length($columName)),
           |       sum(case when $columName = '' then 1 else 0 end)
           |from data_table_for_spark""".stripMargin).first()

      // count($col) counts only non-null values; nulls = total - nonNull.
      // BUG FIX: the original filtered Row == null, which is never true — the
      // null lives inside the Row, not at the RDD element level.
      val notNull = if (stats.isNullAt(3)) 0L else stats.getLong(3)
      val distinct = if (stats.isNullAt(2)) 0L else stats.getLong(2)
      val nulls = rowCount - notNull

      mapColnumValueMax.put(columName, asInteger(stats.get(0)))
      mapColumValueMin.put(columName, asInteger(stats.get(1)))
      mapfenbu.put(columName, percent(distinct))
      mapNullRate.put(columName, percent(nulls))
      mapNotNullCount.put(columName, Integer.valueOf(notNull.toInt))
      mapColMaxLength.put(columName, asInteger(stats.get(4)))
      mapColMinlLength.put(columName, asInteger(stats.get(5)))
      mapNullCount.put(columName, Integer.valueOf(nulls.toInt))
      mapBlankCount.put(columName, asInteger(stats.get(6)))

      println(mapColnumValueMax)
      println(mapColumValueMin)
      println(mapfenbu)
      println(mapNullRate)
      println(mapNotNullCount)
      println(mapColMaxLength)
      println(mapColMinlLength)
      println(mapNullCount)
      println(mapBlankCount)
    }

    // push remaining metrics to EL
 /*   ElsearchUtils.addelemtsInteger(client,mapColnumValueMax,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapColumValueMin,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapfenbu,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapNullRate,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapNotNullCount,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapColMaxLength,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapColMinlLength,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapNullCount,"test","test")
    ElsearchUtils.addelemtsInteger(client,mapBlankCount,"test","test")
*/

    hiveContext.uncacheTable("data_table_for_spark") // release the cached table
    sc.stop() // shut down the Spark context
  }
}
