package ds_recommended

import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Batch job: top-10 provinces by total sales amount.
 *
 * Reads order rows (dwd.order_info) and product prices (ods.shop_info)
 * over JDBC, joins them on product_id, sums price * buy_number per
 * province, keeps the 10 largest totals, formats each total as a whole
 * number of 万 (ten-thousands), and overwrites dws.province_top10_money.
 */
object province_top10_money {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("前十的地区销售额")
      .getOrCreate()

    try {
      // JDBC connection properties.
      // NOTE(review): credentials are hard-coded — consider moving them to
      // external configuration. "com.mysql.jdbc.Driver" is the legacy
      // Connector/J class name; current drivers use "com.mysql.cj.jdbc.Driver".
      val connect = new Properties()
      connect.setProperty("user", "root")
      connect.setProperty("password", "123456")
      connect.setProperty("driver", "com.mysql.jdbc.Driver")

      // Expose both source tables as temp views for the SQL below.
      spark.read.jdbc("jdbc:mysql://192.168.67.193:3307/dwd?useSSL=false", "order_info", connect)
        .createOrReplaceTempView("order_info")

      spark.read.jdbc("jdbc:mysql://192.168.67.193:3307/ods?useSSL=false", "shop_info", connect)
        .createOrReplaceTempView("shop_info")

      // Single GROUP BY aggregation replaces the original
      // DISTINCT + SUM(...) OVER (PARTITION BY province) across three nested
      // subqueries: one shuffle, no duplicate-elimination pass, same result.
      // Ordering happens on the numeric total (before formatting), as before.
      val result = spark.sql(
        """
          |select
          |  t.province,
          |  concat(cast(cast(t.total / 10000 as int) as string), "万") as province_money
          |from (
          |  select
          |    o.province,
          |    sum(s.price * o.buy_number) as total
          |  from order_info as o
          |  join shop_info as s
          |    on s.product_id = o.product_id
          |  group by o.province
          |) as t
          |order by t.total desc
          |limit 10
          |""".stripMargin)

      result.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "province_top10_money", connect)
    } finally {
      // Release the session even when a JDBC read/write fails
      // (the original called close() unconditionally at the end,
      // which was skipped on any earlier exception).
      spark.stop()
    }
  }
}
