import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}


object district_num {
  /**
    * Sums `consume_count` per Guangzhou district for the eight districts of
    * interest and stores the result in the Hive table `db_minsu.district_num`.
    *
    * Rows whose `district_name` is not one of the eight listed districts all
    * collapse into a single NULL `area` group — this preserves the original
    * CASE-with-no-ELSE semantics exactly.
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      // NOTE(review): hard-coded local master; for cluster runs this should be
      // supplied via spark-submit instead — confirm intended deployment.
      .setMaster("local[*]")
    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport() // required: reads db_minsu.tb_minsu and writes back to Hive
      .appName("price_rat") // NOTE(review): name looks copy-pasted from another job; kept as-is since it is runtime-visible
      .getOrCreate()

    try {
      // The original query repeated an 8-branch identity CASE expression
      // (WHEN x = 'v' THEN 'v') in both SELECT and GROUP BY. An IN list is
      // equivalent and maintainable; grouping by the `area` alias avoids
      // duplicating the expression (supported by Spark SQL by default).
      val districtNum = spark.sql(
        """
          |SELECT
          |    CASE
          |        WHEN district_name IN ('天河区', '越秀区', '白云区', '番禺区',
          |                               '海珠区', '荔湾区', '增城区', '从化区')
          |            THEN district_name
          |    END AS area,
          |    SUM(consume_count) AS total_consume_count
          |FROM
          |    db_minsu.tb_minsu
          |GROUP BY
          |    area
          |""".stripMargin
      )

      // Write the aggregate directly; the original registered a temp view and
      // re-selected the same two columns, which was a no-op roundtrip
      // (createTempView also returns Unit, so assigning it to a val was wrong).
      districtNum.write.mode(SaveMode.Overwrite).saveAsTable("db_minsu.district_num")
    } finally {
      // stop() alone suffices — close() is an alias that just calls stop().
      // try/finally ensures the session is released even if the job fails.
      spark.stop()
    }
  }
}
