package HHD.Sgg.UserADS

/**
 * Gong Yingjie:
 * Count the users who logged in during the 3, 7 and 14 days leading up to 2024-04-26,
 * the average user login rate over each range, and the weekday(s) with the most user
 * logins. A user with only one login record inside a range is treated as a churned
 * (lost) user for that range; the number of churned users is also reported.
 * Results are written to HHDtest.login3and7and14day
 * Field               Type          Description
 * days                INT           length of the look-back window, in days
 * date_range          STRING        the date range, formatted start_end
 * max_login_in_week   STRING        weekday(s) with the most user logins; ties are kept as a set joined with ","
 * lost_users          STRING        number of churned users in the range
 * lost_user_rate      STRING        churn rate for the range (two decimals, xx.xx%)
 * range_login_rate    STRING        login rate for the range (two decimals, xx.xx%)
 */

import org.apache.spark.sql.SparkSession

/**
 * Spark job that, for each look-back window (3, 7 and 14 days before a reference
 * date), computes per-range login statistics and appends them to the Hive table
 * `HHDtest.login3and7and14day`.
 *
 * The reference date defaults to 2024-04-26 and may be overridden by the first
 * command-line argument (format `yyyy-MM-dd`), which keeps the original
 * zero-argument behavior intact.
 */
object UserLoginDate {

  /**
   * Builds the Hive query for one look-back window.
   *
   * The query text is identical to the three hand-written originals except that
   * the window length and the reference date are interpolated, eliminating the
   * triplicated ~50-line SQL blocks.
   *
   * @param days     length of the look-back window in days (3, 7 or 14)
   * @param baseDate reference date, `yyyy-MM-dd`
   * @return a Hive `FROM ... SELECT` query producing one row per window with the
   *         columns days, date_range, lost_users, max_login_in_week,
   *         lost_user_rate and range_login_rate
   */
  private def rangeLoginQuery(days: Int, baseDate: String): String =
    s"""
       |from(from(from(from HHDtest.fact_user_login l join HHDtest.dim_user_info u on l.user_id = u.id
       |   select
       |   distinct
       |       user_id,
       |       u.name user_name,
       |       date_format(login_time,'yyyy-MM-dd') login_time
       |   where login_time >= date_sub('$baseDate',$days) and login_time <= '$baseDate')t1
       |select
       |       user_id,
       |       $days days,
       |       concat(date_sub('$baseDate',$days),'_','$baseDate') date_range,
       |       if(count(user_id) over(partition by user_id) = 1,1,0)lost_users)t1
       |left join
       |(from HHDtest.fact_user_login
       |    select
       |       count(distinct user_id) cntuser
       |    group by user_id)t2
       |left join
       |(
       |from(from HHDtest.fact_user_login
       |    select
       |    distinct
       |       user_id,
       |       date_format(login_time,'EEEE')d)t1
       |select
       |   d,
       |   count(user_id) a
       |    group by d
       |)t3
       |select
       |     distinct
       |        user_id,
       |        days,
       |        date_range,
       |        lost_users,
       |        cntuser,
       |        collect_set(d) over() d,
       |        a)t
       |select
       |distinct
       |   days,
       |   date_range,
       |   lost_users,
       |   concat_ws(',',d) max_login_in_week,
       |   concat(round((lost_users/sum(cntuser) over())*100,2),'%') lost_user_rate,
       |   concat(round((sum(cntuser) over()/$days)*100,2),'%') range_login_rate
       |""".stripMargin

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().getOrCreate()
    try {
      // Optional CLI override of the reference date; defaults to the original
      // hard-coded value so existing invocations behave identically.
      val baseDate = if (args.nonEmpty) args(0) else "2024-04-26"

      // One DataFrame per window; `union` replaces the deprecated `unionAll`
      // (same UNION ALL semantics — duplicates are preserved).
      val combined = Seq(3, 7, 14)
        .map(days => spark.sql(rangeLoginQuery(days, baseDate)))
        .reduce(_ union _)

      combined.createOrReplaceTempView("v_s")
      spark.sql("insert into table HHDtest.login3and7and14day select * from v_s")
    } finally {
      // The original leaked the session; always release cluster resources.
      spark.stop()
    }
  }
}
