import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}


/**
 * Batch job: buckets listing prices from `db_minsu.tb_minsu` into 100-yuan
 * ranges, sums `consume_count` per bucket, and overwrites the Hive table
 * `db_minsu.price_num` with the result.
 *
 * NOTE(review): prices >= 900 (or NULL) fall outside every CASE branch and are
 * grouped under a NULL bucket, matching the original query's behavior — confirm
 * whether that row is wanted downstream.
 */
object price_num {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]") // local mode; override via spark-submit for cluster runs
    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport() // required: reads/writes Hive-managed tables
      .appName("price_rat")
      .getOrCreate()

    // Compute the price bucket once in a subquery and group on its alias,
    // instead of duplicating the nine-branch CASE in SELECT and GROUP BY
    // (the two copies had already drifted out of sync).
    val priceNum = spark.sql(
      """
        |SELECT
        |    price_range AS price,
        |    SUM(consume_count) AS total_consume_count
        |FROM (
        |    SELECT
        |        CASE
        |            WHEN price >= 0 AND price < 100 THEN '0-100元'
        |            WHEN price >= 100 AND price < 200 THEN '100-200元'
        |            WHEN price >= 200 AND price < 300 THEN '200-300元'
        |            WHEN price >= 300 AND price < 400 THEN '300-400元'
        |            WHEN price >= 400 AND price < 500 THEN '400-500元'
        |            WHEN price >= 500 AND price < 600 THEN '500-600元'
        |            WHEN price >= 600 AND price < 700 THEN '600-700元'
        |            WHEN price >= 700 AND price < 800 THEN '700-800元'
        |            WHEN price >= 800 AND price < 900 THEN '800-900元'
        |        END AS price_range,
        |        consume_count
        |    FROM db_minsu.tb_minsu
        |) bucketed
        |GROUP BY price_range
        |""".stripMargin
    )

    // Persist the aggregate directly; the original detoured through a temp view
    // and a `select *` that re-read the identical DataFrame.
    priceNum.write.mode(SaveMode.Overwrite).saveAsTable("db_minsu.price_num")

    // stop() is sufficient; close() merely delegates to stop().
    spark.stop()
  }
}
