package ds_recommended

import org.apache.spark.sql.SparkSession

import java.util.Properties

object top5_product {
  def main(args: Array[String]): Unit = {
    /*
        Compute the top-5 best-selling products (by total units sold) and
        persist them — together with catalogue details and each product's
        share of total sales — to the dws.top5_product table.
     */
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("统计销量前五的热门")
      .getOrCreate()

    // JDBC connection properties.
    // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x class;
    // Connector/J 8.x renamed it to com.mysql.cj.jdbc.Driver — confirm which
    // connector jar is on the classpath before upgrading this string.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Register the cleaned order data (dwd) and the product catalogue (ods)
    // as temp views so both can be joined in SQL below.
    spark.read
      .jdbc("jdbc:mysql://192.168.67.193:3307/dwd?useSSL=false", "order_info", connect)
      .createOrReplaceTempView("order_info")

    spark.read
      .jdbc("jdbc:mysql://192.168.67.193:3307/ods?useSSL=false", "shop_info", connect)
      .createOrReplaceTempView("shop_info")

    // Top-5 products by units sold, with each product's share of total sales.
    //
    // Fixes over the original implementation:
    //  * per-product totals use GROUP BY instead of DISTINCT + a window sum;
    //  * the grand total is computed inside the query (no collect() round
    //    trip to the driver, no string interpolation into SQL, no lossy
    //    cast to int that could overflow or NPE on an empty table);
    //  * the share is a real percentage (x100, rounded to 2 decimals)
    //    instead of digits sliced out of the fraction's string form, which
    //    produced incorrect values such as "0.234%" for a 2.34% share.
    spark.sql(
      """
        |select
        |  t.product_id,
        |  t.sell_number,
        |  concat(cast(round(t.sell_number / s.total * 100, 2) as string), '%') as zhanbi
        |from (
        |  select product_id, sum(buy_number) as sell_number
        |  from order_info
        |  group by product_id
        |) as t
        |cross join (
        |  select sum(buy_number) as total from order_info
        |) as s
        |order by t.sell_number desc
        |limit 5
        |""".stripMargin).createOrReplaceTempView("r1")

    // Enrich the top-5 ids with product details from the catalogue.
    val result = spark.sql(
      """
        |select
        |  r1.*,
        |  r2.rating,
        |  r2.price,
        |  r2.title,
        |  r2.image_url,
        |  r2.class_name
        |from r1
        |join shop_info as r2
        |  on r2.product_id = r1.product_id
        |order by sell_number desc
        |""".stripMargin)

    result.show()

    // Overwrite any previous run's output.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "top5_product", connect)

    spark.close()
  }

}
