package com.atguigu.member.service

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Description : Assembles dwd-layer data into dws-layer wide / zipper tables (service layer).
 * Author      : 剧情再美终是戏
 * Mail        : 13286520398@163.com
 * Date        : Created in 2020/3/6 15:19
 * Modified By :
 * Version     : 1.0
 */
object DwsMemberService {

  /**
   * Builds the dws_member_zipper table (SCD Type 2 "zipper" history of member payments).
   *
   * Algorithm:
   *   1. t1: aggregate the day's new payment rows (joined to vip level) into one row
   *      per uid, opened at `dt` and closed with the open-ended sentinel end_time.
   *   2. t2: the current (open, end_time = sentinel) record of each uid already in the zipper.
   *   3. t3: all historical (closed) zipper records, carried over unchanged.
   *   4. t1 UNION ALL t2, then per uid ordered by start_time:
   *      - paymoney becomes a running total (previous balance + new payment),
   *      - each record's end_time is set to the next record's start_time via LEAD,
   *        the last record staying open with the sentinel.
   *   5. Step-4 result UNION ALL t3 overwrites the zipper table.
   *
   * NOTE(review): this reads dws.dws_member_zipper and overwrites the same table in one job;
   * it relies on Hive-table overwrite semantics — confirm the deployment does not hit Spark's
   * "cannot overwrite a path that is also being read from" restriction.
   *
   * @param spark active SparkSession (Hive support enabled)
   * @param dt    partition date being processed; also the start_time of new zipper rows
   * @param dn    tenant/partition discriminator column value
   */
  def buildDwsMemberZipper(spark: SparkSession, dt: String, dn: String): Unit = {
    // Sentinel end_time marking a zipper row as still current. It is only ever
    // string-compared (never parsed as a date), so the invalid month/day is harmless.
    val maxTime = "9999-99-99"
    val sql =
    s"""
       |with t1 as (
       |        select
       |            uid,
       |            sum(cast(paymoney as decimal(10,4))) paymoney,
       |            max(vip_level) vip_level,
       |            '${dt}' start_time,
       |            '${maxTime}' end_time,
       |            '${dn}' dn
       |       from dwd.dwd_pcentermempaymoney dpp
       |       left join dwd.dwd_vip_level dvl on dpp.vip_id  =  dvl.vip_id  and dpp.dn = dvl.dn
       |       where dpp.dt = '${dt}' and dpp.dn = '${dn}'
       |       group by uid
       |   ),
       |    t2 as (
       |       select
       |           uid,
       |           paymoney,
       |           vip_level,
       |           start_time,
       |           end_time,
       |           '${dn}' dn
       |       from dws.dws_member_zipper dmz
       |       where dmz.dn = '${dn}' and dmz.end_time = '${maxTime}'
       |       ),
       |     t3 as (
       |        select
       |           uid,
       |           paymoney,
       |           vip_level,
       |           start_time,
       |           end_time,
       |           '${dn}' dn
       |       from dws.dws_member_zipper dmz
       |       where dmz.dn = '${dn}' and dmz.end_time != '${maxTime}'
       |       )
       |select * from t3
       |union all
       |select
       |   uid,
       |   sum(cast(paymoney as decimal(10,4))) over(partition by uid order by start_time) paymoney,
       |   vip_level,
       |   start_time,
       |   LEAD(start_time,1, '${maxTime}') over(partition by uid order by start_time) end_time,
       |   '${dn}' dn
       |from (select * from t1 union all select * from t2) tmp
       |order by uid, start_time
       |""".stripMargin

    // Rebuild the zipper and overwrite the previous version of the table.
    spark.sql(sql).coalesce(2).write.mode(SaveMode.Overwrite).insertInto("dws.dws_member_zipper")
  }

  /**
   * Builds the dws_member wide table for one partition date by joining
   * dwd.dwd_member, dwd.dwd_base_ad, dwd.dwd_member_regtype, dwd.dwd_base_website,
   * dwd.dwd_pcentermempaymoney and dwd.dwd_vip_level, then collapsing the join
   * fan-out to one row per (uid, dt, dn) with max()/sum()/min() aggregates.
   *
   * Bug fixes vs. the previous version:
   *  - the dwd_base_ad join compared dwd_mb.ad_id with itself (always true),
   *    effectively matching every ad row in the same dn; it now joins on dwd_ba.ad_id;
   *  - the dn condition intended for the dwd_member_regtype join had been placed on
   *    the dwd_base_website join; each join now carries its own dn equality.
   *
   * @param spark active SparkSession (Hive support enabled)
   * @param dt    partition date to build
   */
  def buildDwsMemberWideTable(spark: SparkSession, dt: String): Unit = {
    val sql =
      s"""
         |select
         |	uid, max(ad_id), max(fullname), max(iconurl), max(lastlogin), max(mailaddr), max(memberlevel),
         |	max(password), sum(cast(paymoney as decimal(10,4))), max(phone), max(qq), max(register), max(regupdatetime),
         |	max(unitname), max(userip), max(zipcode), max(appkey), max(appregurl), max(bdp_uuid), max(reg_createtime),
         |	max(domain), max(isranreg), max(regsource), max(regsourcename), max(adname), max(siteid), max(sitename),
         |	max(siteurl), max(delete), max(site_createtime), max(creator), max(vip_id), max(vip_level), min(start_time),
         |	max(end_time), max(last_modify_time), max(max_free), max(min_free), max(next_level), max(operator), dt, dn
         |from
         |	(
         |	select
         |		dwd_mb.uid, dwd_mb.ad_id, dwd_mb.fullname, dwd_mb.iconurl, dwd_mb.lastlogin, dwd_mb.mailaddr,
         |		dwd_mb.memberlevel, dwd_mb.password, dwd_pm.paymoney, dwd_mb.phone, dwd_mb.qq qq, dwd_mb.register,
         |		dwd_mb.regupdatetime, dwd_mb.unitname, dwd_mb.userip, dwd_mb.zipcode, dwd_dmr.appkey, dwd_dmr.appregurl,
         |		dwd_dmr.bdp_uuid, dwd_dmr.createtime reg_createtime, dwd_dmr.domain, dwd_dmr.isranreg, dwd_dmr.regsource,
         |		dwd_dmr.regsourcename, dwd_ba.adname, dwd_dbw.siteid, dwd_dbw.sitename, dwd_dbw.siteurl, dwd_dbw.delete,
         |		dwd_dbw.createtime site_createtime, dwd_dbw.creator, dwd_dvl.vip_id, dwd_dvl.vip_level, dwd_dvl.start_time,
         |		dwd_dvl.end_time, dwd_dvl.last_modify_time, dwd_dvl.max_free, dwd_dvl.min_free, dwd_dvl.next_level,
         |		dwd_dvl.operator, dwd_mb.dt, dwd_mb.dn
         |	from dwd.dwd_member dwd_mb
         |	left join dwd.dwd_base_ad dwd_ba on dwd_mb.ad_id = dwd_ba.ad_id and dwd_mb.dn = dwd_ba.dn
         |	left join dwd.dwd_member_regtype dwd_dmr on dwd_mb.uid = dwd_dmr.uid and dwd_mb.dn = dwd_dmr.dn
         |	left join dwd.dwd_base_website dwd_dbw on dwd_dmr.websiteid = dwd_dbw.siteid and dwd_mb.dn = dwd_dbw.dn
         |	left join dwd.dwd_pcentermempaymoney dwd_pm on dwd_pm.uid = dwd_mb.uid and dwd_pm.dn = dwd_mb.dn
         |	left join dwd.dwd_vip_level dwd_dvl on dwd_dvl.vip_id = dwd_pm.vip_id and dwd_dvl.dn = dwd_mb.dn
         |	where dwd_mb.dt= '${dt}'
         |	) tmp
         |group by tmp.uid, tmp.dn, tmp.dt
         |""".stripMargin

    // Many tables are joined; disable map-side joins to avoid broadcast OOM.
    spark.sql("set hive.auto.convert.join=false")
    spark.sql(sql).coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dws.dws_member")
  }
}
