import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}


/**
 * Batch job: buckets rows of `db_produce.tb_produce` into price ranges and
 * writes the per-bucket row counts to Hive as `db_produce.price_ptv_num`.
 *
 * Output table schema: `name` (price-range label) / `value` (row count).
 * Runs with a local master and Hive support enabled.
 */
object price_ptv {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .appName("price_ptv")
      .getOrCreate()

    // Label every row with its price bucket. CASE arms are evaluated top to
    // bottom, so each range is implicitly bounded below by the previous arm;
    // a NULL item_price falls through all arms and yields a NULL level.
    val bucketed = spark.sql(
      """
        |select ord_qty,case
        |when  item_price < 500 then'500以下'
        |when  item_price < 1000 then '500-1000'
        |when  item_price < 1500 then'1000-1500'
        |when  item_price <2000 then'1500-2000'
        |when  item_price <3000 then '2000-3000'
        |when  item_price >= 3000 then '3000以上'
        |end level
        |from db_produce.tb_produce
        |""".stripMargin
    )

    // createTempView returns Unit, so there is nothing useful to bind.
    bucketed.createTempView("price_view")

    // Count rows per price bucket; aliases match the target table's columns.
    val bucketCounts = spark.sql(
      "select level as name,count(*) as value from price_view group by level ")

    // Persist the result to Hive, replacing any previous run's output.
    bucketCounts.write.mode(SaveMode.Overwrite).saveAsTable("db_produce.price_ptv_num")

    // SparkSession.close() is an alias for stop(); a single stop() suffices
    // to release the underlying SparkContext.
    spark.stop()
  }
}
