package ds_recommended

import org.apache.spark.sql.SparkSession

import java.util.Properties

object user_avg_money {

  /**
   * Computes the average spend per user — total order value (order_info joined
   * to shop_info prices) divided by the number of distinct user_ids across all
   * of order_info — and writes the single-row result to dws.user_avg_money.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("平均每个人的消费额度")
      .getOrCreate()

    try {
      // JDBC connection properties.
      // NOTE(review): credentials are hard-coded — move to config/env for production use.
      val connect = new Properties()
      connect.setProperty("user", "root")
      connect.setProperty("password", "123456")
      // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x class name;
      // Connector/J 8.x uses com.mysql.cj.jdbc.Driver — confirm the driver on the classpath.
      connect.setProperty("driver", "com.mysql.jdbc.Driver")

      // Register the two source tables as temp views for Spark SQL.
      spark.read.jdbc("jdbc:mysql://192.168.67.193:3307/dwd?useSSL=false", "order_info", connect)
        .createOrReplaceTempView("order_info")

      spark.read.jdbc("jdbc:mysql://192.168.67.193:3307/ods?useSSL=false", "shop_info", connect)
        .createOrReplaceTempView("shop_info")

      // Single query replaces the original collect() + string-interpolation round trip
      // (which ran an extra Spark job and re-parsed the count from a string literal).
      // Semantics preserved: the denominator is count(distinct user_id) over ALL of
      // order_info — including orders whose product_id has no match in shop_info —
      // exactly as the original two-step computation did; the quotient is then
      // truncated to int, as before.
      val result = spark.sql(
        """
          |select
          |  cast(t.all_money / u.user_cnt as int) as user_avg_money
          |from (
          |  select sum(o.buy_number * s.price) as all_money
          |  from order_info as o
          |  join shop_info as s
          |    on s.product_id = o.product_id
          |) as t
          |cross join (
          |  select count(distinct user_id) as user_cnt
          |  from order_info
          |) as u
          |""".stripMargin)

      result.show()

      result.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.67.193:3307/dws?useSSL=false", "user_avg_money", connect)
    } finally {
      // Always release the session, even if a read/write above fails.
      spark.close()
    }
  }

}
