import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}


/**
 * Batch job: aggregate order quantity (`ord_qty`) per sales region from
 * `db_produce.tb_produce` and persist the result to the Hive table
 * `db_produce.area_num`.
 *
 * Region codes are mapped to province/city names; codes outside 101-105
 * fall into a single NULL-area group (no ELSE branch), matching the
 * original query's behavior.
 */
object area_num {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")

    // Hive support is required for saveAsTable into the db_produce database.
    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .appName("price_ptv")
      .getOrCreate()

    // NOTE(fixes vs. original):
    //  - every line now carries the `|` margin so stripMargin is consistent;
    //  - the trailing `;` was removed — SparkSession.sql rejects it with a
    //    ParseException;
    //  - the CASE expression is repeated verbatim in GROUP BY (rather than
    //    grouping by the alias) to stay portable regardless of the
    //    spark.sql.groupByAliases setting.
    val areaDemand = spark.sql(
      """
        |SELECT
        |    CASE
        |        WHEN sales_region_code = 101 THEN '河南'
        |        WHEN sales_region_code = 102 THEN '上海'
        |        WHEN sales_region_code = 103 THEN '江西'
        |        WHEN sales_region_code = 104 THEN '北京'
        |        WHEN sales_region_code = 105 THEN '天津'
        |    END AS area,
        |    SUM(ord_qty) AS total_demand
        |FROM
        |    db_produce.tb_produce
        |GROUP BY
        |    CASE
        |        WHEN sales_region_code = 101 THEN '河南'
        |        WHEN sales_region_code = 102 THEN '上海'
        |        WHEN sales_region_code = 103 THEN '江西'
        |        WHEN sales_region_code = 104 THEN '北京'
        |        WHEN sales_region_code = 105 THEN '天津'
        |    END
        |ORDER BY
        |    area
        |""".stripMargin
    )

    // The original registered a temp view and re-selected the same two
    // columns; `areaDemand` already holds exactly that result, so write it
    // directly. Overwrite keeps the job idempotent across reruns.
    areaDemand.write.mode(SaveMode.Overwrite).saveAsTable("db_produce.area_num")

    // stop() releases the SparkContext; close() merely delegates to stop(),
    // so a single call is sufficient.
    spark.stop()
  }
}
