package ds_recommended

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

import java.util.Properties

/**
 * Batch job that reads order/shop data over JDBC and writes three summary
 * tables: total sales for a year, the best-selling month, and the
 * best-selling day. Amounts are reported in units of 10,000 ("万").
 *
 * The target year defaults to "2024" but may be overridden by the first
 * command-line argument, e.g. `... year_month_day 2023`.
 */
object year_month_day {
  def main(args: Array[String]): Unit = {
    // Generalization: allow the analysis year to come from the CLI;
    // falls back to the original hard-coded value for compatibility.
    val year = if (args.nonEmpty) args(0) else "2024"

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("分别计算年销售额度和月销售最大的额度，以及最大的天销售额度")
      .getOrCreate()

    // JDBC connection properties. NOTE(review): credentials are hard-coded;
    // consider externalizing them. `com.mysql.jdbc.Driver` is the legacy
    // Connector/J class name (the modern one is `com.mysql.cj.jdbc.Driver`) —
    // kept as-is to avoid changing runtime configuration.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    try {
      // Register the two source tables as temp views for Spark SQL.
      spark.read.jdbc("jdbc:mysql://192.168.67.193:3307/dwd?useSSL=false", "order_info", connect)
        .createOrReplaceTempView("order_info")

      spark.read.jdbc("jdbc:mysql://192.168.67.193:3307/ods?useSSL=false", "shop_info", connect)
        .createOrReplaceTempView("shop_info")

      // Total sales for the target year, formatted as "<n>万".
      // BUG FIX: the original labelled the row with `year` but never filtered
      // order_info by it, so it summed orders from every year.
      val r1 = spark.sql(
        s"""
          |select
          |${year} as year,
          |concat(cast(cast(sum(o.buy_number * s.price) / 10000 as int) as string), "万") as all_money
          |from order_info as o
          |join shop_info as s
          |on s.product_id = o.product_id
          |where year(o.buy_time) = ${year}
          |""".stripMargin)

      // Month with the highest sales in the target year.
      // Simplified from the original distinct + window-sum pattern to a plain
      // GROUP BY aggregate; arithmetic is unchanged (sum / 10000, cast to int).
      val r2 = spark.sql(
        s"""
          |select
          |t.month,
          |concat(cast(cast(t.month_money as int) as string), "万") as month_money
          |from (
          |  select
          |  month(o.buy_time) as month,
          |  sum(o.buy_number * s.price) / 10000 as month_money
          |  from order_info as o
          |  join shop_info as s
          |  on s.product_id = o.product_id
          |  where year(o.buy_time) = ${year}
          |  group by month(o.buy_time)
          |  order by month_money desc
          |  limit 1
          |) as t
          |""".stripMargin)

      // Day with the highest sales in the target year.
      // Same GROUP BY simplification; the division by 10000 happens in the
      // outer select here, exactly as in the original query.
      val r3 = spark.sql(
        s"""
          |select
          |t.month, t.day,
          |concat(cast(cast(t.day_money / 10000 as int) as string), "万") as day_money
          |from (
          |  select
          |  month(o.buy_time) as month,
          |  day(o.buy_time) as day,
          |  sum(o.buy_number * s.price) as day_money
          |  from order_info as o
          |  join shop_info as s
          |  on s.product_id = o.product_id
          |  where year(o.buy_time) = ${year}
          |  group by month(o.buy_time), day(o.buy_time)
          |  order by day_money desc
          |  limit 1
          |) as t
          |""".stripMargin)

      r1.show
      r2.show
      r3.show

      // Persist results. "overwrite" mode drops and recreates each target
      // table on every run.
      r1.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "year_money", connect)

      r2.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "month_money", connect)

      r3.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "day_money", connect)
    } finally {
      // Ensure the session is released even if a read/write fails.
      spark.close()
    }
  }

}
