package my_project.business

import my_project.util.{KuduUtils, SQLUtils, SchemaUtils}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object ProvinceCityStatApp {

  /**
   * Batch job: reads the raw ODS table from Kudu, aggregates the stats
   * grouped by province and city (via `SQLUtils.PROVINCE_CITY_SQL`), and
   * writes the result back to Kudu as `provincename_city_stat`.
   *
   * Usage: ProvinceCityStatApp [kuduMasterAddresses]
   *
   * @param args optional first argument overrides the Kudu master address
   *             list; defaults to "hadoop000" (the original dev host) so
   *             existing invocations keep working unchanged.
   */
  def main(args: Array[String]): Unit = {
    // Allow the Kudu masters to be supplied on the command line instead of
    // being hard-coded; fall back to the original default for compatibility.
    val masterAddresses = if (args.nonEmpty) args(0) else "hadoop000"

    val spark: SparkSession = SparkSession.builder()
      .master("local").appName("ProvinceCityStatApp")
      .getOrCreate()

    try {
      // Load the raw ODS data from Kudu; group by city and province below.
      val odsDF: DataFrame = spark.read.format("org.apache.kudu.spark.kudu")
        .option("kudu.master", masterAddresses)
        .option("kudu.table", "ods")
        .load()

      // Expose the data as a temp view so the aggregation can be expressed
      // as SQL (query text lives in SQLUtils.PROVINCE_CITY_SQL).
      odsDF.createOrReplaceTempView("ods")
      val result: DataFrame = spark.sql(SQLUtils.PROVINCE_CITY_SQL)

      // Persist the aggregated stats back to Kudu, partitioned by province.
      val tableName = "provincename_city_stat"
      val partitionId = "provincename"
      KuduUtils.sink(result, tableName, masterAddresses, SchemaUtils.ProvinceCitySchema, partitionId)
    } finally {
      // Release the SparkSession even when the read/SQL/sink fails; the
      // original code leaked the session on any exception before stop().
      spark.stop()
    }
  }

}
