package DianShang_2024.ds_server.indicator

import org.apache.spark.sql.SparkSession

object trait07 {

  /**
   * Task 7: based on the dwd/dws layer data, compute the ratio of users who
   * placed orders on two consecutive days to all users who have placed an
   * order, and store the result in the ClickHouse table
   * shtd_result.userrepurchasedrate (then verify it from the ClickHouse CLI).
   */
  def main(args: Array[String]): Unit = {

    // Spark session with Hive support so the dwd_server.* tables resolve.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第七题")
      .enableHiveSupport()
      .getOrCreate()

    // NOTE(review): an earlier customer_id + lead()-based draft of this metric
    // was removed from here; this version uses the date_sub(create_time, rank)
    // "gaps and islands" technique instead.

    // temp01: orders that were placed ('已下单') and never refunded.
    // The NOT IN subquery drops every order_sn that also appears with a
    // refund ('已退款') status row; rows without a create_time are excluded
    // up front because they cannot take part in the consecutive-day logic.
    spark.sql(
      """
        |select
        |*
        |from(
        |select
        |*
        |from dwd_server.fact_order_master
        |where order_status='已下单'
        |and order_sn not in (
        |select
        |order_sn
        |from dwd_server.fact_order_master
        |where order_status='已退款'
        |)
        |) as t1
        |where create_time is not null
        |""".stripMargin).createOrReplaceTempView("temp01")

    spark.sql("select * from temp01 limit 20").show

    // Total number of distinct users that have placed (and kept) an order.
    // NOTE(review): users are identified by shipping_user here — confirm this
    // is the intended user key rather than customer_id.
    val order_user_number = spark.sql(
      """
        |select
        |count(user_name)
        |from(
        |select
        |shipping_user as user_name,
        |count(*) as user_number
        |from temp01
        |group by shipping_user
        |) as t1
        |""".stripMargin)
    order_user_number.show()

    // Number of users with at least one run of orders on consecutive days.
    // Technique: date_sub(create_time, dense_rank) is constant within a run of
    // consecutive dates ("gaps and islands"), so grouping by that value and
    // requiring more than one DISTINCT date per group finds the streaks.
    // count(distinct create_time) — not count(1) — is essential: with
    // dense_rank, two orders on the SAME day share the same rank and group,
    // and a plain row count would wrongly report them as a two-day streak.
    val continuous_user_number = spark.sql(
      """
        |select
        |count(*)
        |from(
        |select
        |user_name,
        |count(user_name)
        |from(
        |select
        |user_name,
        |date_sub(create_time,row_number) as chazhi,
        |count(distinct create_time) as continuous_order_number
        |from(
        |select
        |t1.user_name,
        |date_format(to_timestamp(t1.create_time,'yyyyMMdd'),'yyyy-MM-dd')  as create_time,
        |dense_rank() over(partition by t1.user_name order by date_format(to_timestamp(t1.create_time,'yyyyMMdd'),'yyyy-MM-dd')) as row_number
        |from(
        |select
        |shipping_user as user_name,
        |create_time
        |from temp01
        |) as t1
        |) as t2
        |group by user_name,chazhi
        |having continuous_order_number > 1
        |)as t3
        |group by user_name
        |) as t4
        |""".stripMargin)
    continuous_user_number.show

    // Final metric required by the task: consecutive-two-day users as a
    // fraction of all ordering users. count(...) returns bigint -> getLong.
    val totalUsers = order_user_number.collect()(0).getLong(0)
    val continuousUsers = continuous_user_number.collect()(0).getLong(0)
    val rate = if (totalUsers == 0L) 0.0 else continuousUsers.toDouble / totalUsers
    println(s"total_users=$totalUsers, continuous_users=$continuousUsers, userrepurchasedrate=$rate")

    // TODO: write the result into ClickHouse shtd_result.userrepurchasedrate
    // via the JDBC data source once connection details are known, e.g.:
    //   spark.createDataFrame(Seq((continuousUsers, totalUsers, rate)))
    //     .toDF("purchaseduser", "repurchaseduser", "repurchaserate")
    //     .write.format("jdbc")
    //     .option("url", "jdbc:clickhouse://<host>:8123/shtd_result")
    //     .option("dbtable", "userrepurchasedrate")
    //     .option("driver", "ru.yandex.clickhouse.ClickHouseDriver")
    //     .mode("append")
    //     .save()

    // Release the Spark resources.
    spark.close()
  }

}
