package ds_recommended

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

import java.util.Properties

object class_xiaoliang {

  /**
   * Entry point: for each product class (大类), computes the total units sold
   * (`class_number`) and its share of overall sales (`zhanbi`, formatted as a
   * two-decimal percentage string), then writes the result to
   * `dws.class_xiaoliang` via JDBC.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("统计每个大类的销量以及占比")
      .getOrCreate()

    // JDBC connection properties shared by all reads and the final write.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J class name;
    // Connector/J 8.x uses "com.mysql.cj.jdbc.Driver" — confirm the driver jar version.
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Source tables: order facts (dwd) and product-to-class dimension (ods).
    spark.read
      .jdbc("jdbc:mysql://192.168.67.193:3307/dwd?useSSL=false", "order_info", connect)
      .createOrReplaceTempView("order_info")

    spark.read
      .jdbc("jdbc:mysql://192.168.67.193:3307/ods?useSSL=false", "shop_info", connect)
      .createOrReplaceTempView("shop_info")

    // r1: per-product total units sold (window sum + distinct collapses the
    //     per-row duplicates). r2: per-class total, via the shop_info join.
    //
    // FIX: the original formatted the share by string-slicing its decimal
    // representation and hard-coding a leading "2"
    // (concat("2", ".", substr(substr(cast(zhanbi as string), 4, 6), 2, 2), "%")),
    // which only produced a correct value for shares in the 2.x% range.
    // The overall total was also collected to the driver and string-interpolated
    // back into the SQL, which throws on an empty order_info table. Both are
    // replaced here: the total comes from an uncorrelated scalar subquery and
    // the percentage is computed with round(), correct for any share.
    val result = spark.sql(
      """
        |select
        |  r2.class_id,
        |  r2.class_name,
        |  concat(
        |    cast(round(r2.class_number / (select sum(buy_number) from order_info) * 100, 2) as string),
        |    '%'
        |  ) as zhanbi,
        |  r2.class_number
        |from (
        |  select distinct
        |    s.class_id,
        |    s.class_name,
        |    sum(r1.one_number) over (partition by s.class_id, s.class_name) as class_number
        |  from (
        |    select distinct
        |      product_id,
        |      sum(buy_number) over (partition by product_id) as one_number
        |    from order_info
        |  ) as r1
        |  join shop_info as s
        |    on s.product_id = r1.product_id
        |) as r2
        |""".stripMargin)

    // Persist the aggregate to the dws layer, replacing any previous run.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "class_xiaoliang", connect)

    spark.close()
  }

}
