package com.pro

import com.`trait`._
import com.utils.{ConfigUtils, DateUtils, KuduUtils}
import org.apache.kudu.client.CreateTableOptions
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.sql.{DataFrame, SparkSession}
import shapeless.PolyDefns.->

/**
  * Computes the geographic distribution: record counts grouped by province and city.
  */
object ProviceCityAnalysis extends Process {

  // Source table to read from: yesterday's ODS snapshot.
  val SOURCE_TABLE = s"ODS_${DateUtils.getYesterday()}"

  // Kudu connection options for reading the source table.
  val options = Map[String, String](
    "kudu.master" -> ConfigUtils.KUDU_MASTER,
    "kudu.table" -> SOURCE_TABLE
  )

  // Sink table for the computed metric, suffixed with the current timestamp.
  val SINK_TABLE = s"provice_city_${DateUtils.getNow()}"

  /**
    * Reads the source table from Kudu, counts rows grouped by province and
    * city, then writes the aggregation to the sink table via [[KuduUtils]].
    *
    * @param spark       active SparkSession used to run the SQL aggregation
    * @param kuduContext Kudu context used to create and populate the sink table
    */
  override def process(spark: SparkSession, kuduContext: KuduContext): Unit = {
    import org.apache.kudu.spark.kudu._
    import scala.collection.JavaConverters._

    // Expose the Kudu source table to Spark SQL as a temporary view.
    spark.read.options(options).kudu.createOrReplaceTempView("t_table_info")

    // Row count per (proviceName, city) pair.
    val aggregated: DataFrame = spark.sql(
      """
        |select proviceName,city,count(1) as num from t_table_info group by proviceName,city
      """.stripMargin)

    // Primary-key columns of the sink table, also used for hash partitioning.
    val keyColumns: Seq[String] = Seq("proviceName", "city")

    // 3 hash buckets over the key columns, 3 replicas (builder calls chain).
    val tableOptions = new CreateTableOptions()
      .addHashPartitions(keyColumns.asJava, 3)
      .setNumReplicas(3)

    KuduUtils.writeToKudu(kuduContext, aggregated.schema, tableOptions, SINK_TABLE, keyColumns, aggregated)
  }
}
