package cn.doitedu

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}

import scala.collection.mutable.ListBuffer

/**
 * One raw page_load event inside a session — the node payload before the
 * session click tree is assembled.
 *
 * @param guid        global user id
 * @param session_id  session this view belongs to
 * @param event_id    event type (e.g. page_load)
 * @param page        visited url (properties['url'])
 * @param ref_page    referrer url (properties['ref']); null for a session entry page
 * @param action_time event timestamp, used to order views within a session
 */
case class PageView(
  guid: Long,
  session_id: String,
  event_id: String,
  page: String,
  ref_page: String,
  action_time: Long
)

/**
 * A node of the per-session click tree: the same fields as [[PageView]] plus
 * a mutable child list that gets filled in while the tree is being linked.
 *
 * NOTE: `children` is a mutable ListBuffer by design — parent/child edges are
 * wired in after all nodes of a session have been created.
 *
 * @param children child nodes (pages whose ref_page pointed at this page);
 *                 defaults to a fresh empty buffer per instance
 */
case class TreeNode(
  guid: Long,
  session_id: String,
  event_id: String,
  page: String,
  ref_page: String,
  action_time: Long,
  children: ListBuffer[TreeNode] = ListBuffer.empty
)


case class PvContribution(guid:Long, session_id:String,page: String, direct_contribution: Long, whole_contribution: Long)


object Job02_PagePvContribution {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("页面pv共享量计算")
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._

    // Load one day's events: `page` is the visited url, `ref_page` the
    // referrer url (null for a session entry page).
    val df = spark.sql(
      """
        |
        |select
        |
        |guid,
        |session_id,
        |event_id,
        |properties['url'] as page,
        |properties['ref'] as ref_page,
        |action_time
        |
        |from tmp.page_cont_test2
        |where dt='2024-04-22'
        |
        |""".stripMargin)

    // Convert the DataFrame to a typed RDD of PageView
    val rdd: RDD[PageView] = df.as[PageView].rdd

    // Per session: rebuild the click tree, then emit one PvContribution per node
    val resultRdd = rdd.groupBy(pv => pv.session_id)
      .map(tp => tp._2)
      .flatMap(iter => {

        /**
         * Example of one session's raw events (url / ref pairs):
         * 1,t1,page_load,properties{url:/a ,ref:null},children[]
         * 1,t2,page_load,properties{url:/b ,ref:/a},children[]
         * 1,t3,page_load,properties{url:/c ,ref:/a},children[]
         * 1,t4,page_load,properties{url:/d ,ref:/a},children[]
         * 1,t5,page_load,properties{url:/e ,ref:/c},children[]
         * 1,t6,page_load,properties{url:/f ,ref:/c},children[]
         * 1,t7,page_load,properties{url:/a ,ref:/f},children[]
         * 1,t8,page_load,properties{url:/w ,ref:/a},children[]
         */

        // Sort by time and reverse so index 0 is the NEWEST event: a node's
        // parent (the page that referred it) is then always at a LARGER index,
        // and the nearest match is the most recent earlier visit of that page.
        val nodeList = iter.toList
          .sortBy(pv => pv.action_time)
          .map(pv => TreeNode(pv.guid, pv.session_id, pv.event_id, pv.page, pv.ref_page, pv.action_time))
          .reverse

        // Link parent/child edges. hasParent(i) records whether node i was
        // attached to some parent; unattached nodes are tree roots.
        val hasParent = Array.fill(nodeList.size)(false)
        for (i <- 0 until nodeList.size - 1) {
          val currentNode: TreeNode = nodeList(i)

          // Scan forward (i.e. back in time) for the nearest node whose page
          // equals this node's referrer — that node is the parent.
          var found = false
          for (j <- i + 1 until nodeList.size if !found) {
            val nextNode = nodeList(j)

            if (currentNode.ref_page == nextNode.page) {
              nextNode.children += currentNode
              hasParent(i) = true
              found = true
            }
          }
        }

        // Compute per-page contributions for this session:
        //   direct contribution = child count
        //   whole contribution  = child count + sum of children's whole contributions
        val results = ListBuffer.empty[PvContribution]

        // FIX: the original code traversed only nodeList.last (the earliest
        // event), so any node whose referrer never appeared in the session
        // (an orphan root, e.g. a second ref=null entry) was silently dropped
        // from the output. Traverse every unattached root instead; for a
        // well-formed single-root session this produces exactly the same rows,
        // and it no longer throws if the group were ever empty.
        for (i <- nodeList.indices if !hasParent(i)) {
          getPvContribution(nodeList(i), results)
        }

        results
      })

    val resultDf = resultRdd.toDF()

    /**
     * Result rows at this point — note the key property: the same page may
     * appear several times within one session (one row per tree node):
     * PvContribution(2,s2,/z,0,0)
     * PvContribution(2,s2,/q,0,0)
     * PvContribution(2,s2,/p,0,0)
     * PvContribution(2,s2,/y,2,2)
     * PvContribution(2,s2,/x,0,0)
     * PvContribution(2,s2,/c,3,5)
     * PvContribution(2,s2,/b,0,0)
     * PvContribution(2,s2,/a,2,7)
     * PvContribution(1,s1,/d,0,0)
     * PvContribution(1,s1,/w,0,0)
     * PvContribution(1,s1,/a,0,0)
     * PvContribution(1,s1,/f,2,2)
     * PvContribution(1,s1,/e,0,0)
     * PvContribution(1,s1,/c,2,4)
     * PvContribution(1,s1,/b,0,0)
     * PvContribution(1,s1,/a,3,7)
     */

    // Aggregate repeated visits of the same page within a session
    resultDf.createOrReplaceTempView("res")
    spark.sql(
      """
        |
        |select
        |   guid,
        |   session_id,
        |   page,
        |   sum(direct_contribution) as direct_contribution,
        |   sum(whole_contribution) as whole_contribution
        |
        |from res
        |group by
        |   guid,
        |   session_id,
        |   page
        |
        |""".stripMargin).show()

    /**
     * Final aggregated output:
     * +----+----------+----+-------------------+------------------+
     * |guid|session_id|page|direct_contribution|whole_contribution|
     * +----+----------+----+-------------------+------------------+
     * |   1|        s1|  /a|                  3|                 7|
     * |   1|        s1|  /w|                  0|                 0|
     * |   1|        s1|  /f|                  2|                 2|
     * |   1|        s1|  /c|                  2|                 4|
     * |   1|        s1|  /e|                  0|                 0|
     * |   1|        s1|  /b|                  0|                 0|
     * |   1|        s1|  /d|                  0|                 0|
     *
     * |   2|        s2|  /a|                  2|                 7|
     * |   2|        s2|  /b|                  0|                 0|
     * |   2|        s2|  /x|                  0|                 0|
     * |   2|        s2|  /q|                  0|                 0|
     * |   2|        s2|  /p|                  0|                 0|
     * |   2|        s2|  /z|                  0|                 0|
     * |   2|        s2|  /y|                  2|                 2|
     * |   2|        s2|  /c|                  3|                 5|
     * +----+----------+----+-------------------+------------------+
     */

    spark.stop()
  }

  /**
   * Post-order traversal of one click tree computing each node's contribution:
   *   direct contribution = number of children
   *   whole contribution  = direct contribution + sum of children's whole contributions
   *
   * Appends one [[PvContribution]] row per visited node to `results`.
   *
   * @param node    subtree root to process
   * @param results accumulator collecting one row per node (side effect)
   * @return this node's own contribution row
   */
  def getPvContribution(node: TreeNode, results: ListBuffer[PvContribution]): PvContribution = {

    // direct contribution = child count
    val direct = node.children.size

    // sum of the children's whole contributions; the loop body is empty at a
    // leaf, which is the recursion's base case
    var childWhole = 0L
    for (child <- node.children) {
      childWhole += getPvContribution(child, results).whole_contribution
    }

    val res = PvContribution(node.guid, node.session_id, node.page, direct, direct + childWhole)
    results += res

    res
  }

}
