package DianShang_2024.ds_01.indicator

import org.apache.spark.sql.{DataFrame, SparkSession}

import java.util.Properties

object indicator04 {

  /**
   * Indicator 4: from the dwd-layer tables, find users who placed orders on two
   * consecutive days with a strictly increasing order amount, and write the result
   * into the `usercontinueorder` table of the MySQL database `shtd_result`.
   *
   * Output columns: userid, username, day ("firstDay_secondDay"),
   * totalconsumption (sum of both days' amounts), totalorder (sum of both days' counts).
   */
  def main(args: Array[String]): Unit = {

    // Prepare the Spark SQL environment (Hive support needed to read dwd/dim tables).
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第四题")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd")

    // JDBC connection properties for the MySQL sink.
    val jdbc_conf = new Properties()
    jdbc_conf.setProperty("user", "root")
    jdbc_conf.setProperty("password", "123456")
    jdbc_conf.setProperty("driver", "com.mysql.jdbc.Driver")

    // Query plan, innermost to outermost:
    //   temp_a: per user per day, total amount (today_price) and order count (today_count).
    //   temp_b: lead() over days gives each row the NEXT ordered day's date/amount/count.
    //   outer : keep only rows where the next day is exactly the following calendar
    //           day (yyyyMMdd difference of 1 within a month) and the amount grew,
    //           then sum the two days' amounts and counts.
    // NOTE(review): cast(next_day as int) - cast(day as int) = 1 misses month
    // boundaries (e.g. 20231031 -> 20231101 differs by 70) — confirm against the
    // exercise's expected semantics; datediff on parsed dates would be safer.
    val data: DataFrame = spark.sql(
      """
        |select
        |userid,
        |username,
        |concat(day,"_",next_day) as day,
        |today_price + next_price as totalconsumption,
        |today_count + next_count as totalorder
        |from (
        |select
        |userid,
        |username,
        |day,
        |today_price,
        |today_count,
        |lead(day)  over(partition by userid,username order by day) as next_day,
        |lead(today_price) over(partition by userid,username order by day) as next_price,
        |lead(today_count) over(partition by userid,username order by day) as next_count
        |from(
        |select
        |t1.user_id as userid,
        |t2.name as username,
        |date_format(t1.create_time,"yyyyMMdd") as day,
        |sum(t1.final_total_amount) as today_price,
        |count(*) as today_count
        |from
        |dwd.fact_order_info t1
        |join dim_user_info t2 on t1.user_id=t2.id and t1.etl_date='20231017' and t2.etl_date='20231017'
        |group by t1.user_id,t2.name,date_format(t1.create_time,"yyyyMMdd")
        |) as temp_a
        |) as temp_b
        |where cast(next_day as int) - cast(day as int)=1 and next_price > today_price
        |""".stripMargin)

    // Fix: the task requires the `usercontinueorder` table; the previous target
    // "provinceavgcmp" was a leftover from another indicator and sent results
    // to the wrong table.
    data.write
      .mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "usercontinueorder", jdbc_conf)

    // Release the Spark session.
    spark.close()
  }

}
