package com.cluster.user.config


import org.apache.spark.{SparkConf, SparkContext}


/**
 * Local Spark demo: for each host, accumulates positive/negative uv counts and
 * keeps the url with the highest pv, then prints one line per host as
 * `host, (ratio, posUv|negUv, totalUv, chosenUrl)`.
 *
 * Input record shape: (host, (posNegFlag, uv, pv, url)).
 */
object UrlTgiForBjrUvHostFilterWithUrl {
  def main(args: Array[String]): Unit = {
    // Spark configuration; local[*] master is for local testing only.
    val sparkConf = new SparkConf().setAppName("UrlTgiForBx").setMaster("local[*]")
    val sc = new SparkContext(sparkConf)

    // Mock data: (host, (flag, uv, pv, url)).
    // Cache it: this RDD feeds three separate jobs below (two sum() actions
    // and the groupByKey pipeline), so without caching the lineage would be
    // recomputed for each one.
    val rddAll = sc.parallelize(Seq(
      ("www.baidu.com", ("neg", 1, 2, "www.baidu.com/htmlme")),
      ("www.zhihu.com", ("pos", 1, 3, "www.zhihu.com/htmlme")),
      ("www.baidu.com", ("neg", 1, 2, "www.baidu.com/xjla")),
      ("www.zhihu.com", ("pos", 1, 5, "www.zhihu.com/xjla")),
      ("www.baidu.com", ("pos", 1, 2, "www.baidu.com/htmlme")),
      ("www.zhihu.com", ("neg", 1, 3, "www.zhihu.com/htmlme")),
      ("www.baidu.com", ("neg", 1, 2, "www.baidu.com/xjla")),
      ("www.zhihu.com", ("neg", 1, 5, "www.zhihu.com/xjla"))
    )).cache()

    // Global uv totals across all hosts. uv is already an Int, so no per-element
    // conversion is needed; RDD.sum() returns Double, hence the trailing .toInt.
    val totalUvPos = rddAll.filter(_._2._1 == "pos").map(_._2._2).sum().toInt
    val totalUvNeg = rddAll.filter(_._2._1 == "neg").map(_._2._2).sum().toInt
    val total = totalUvPos + totalUvNeg

    // Per host: fold over the grouped records, accumulating
    // (posUv, negUv, maxPv, chosenUrl). The strict `pv > maxPv` comparison means
    // first-seen wins on pv ties, matching the original iteration order.
    // NOTE(review): groupByKey shuffles every record per key; for non-toy input
    // prefer aggregateByKey/reduceByKey — not changed here because merge order
    // could alter the tie-break on equal pv values.
    val rddFiltered = rddAll
      .groupByKey()
      .mapValues { records =>
        val (posUv, negUv, _, chosenUrl) =
          records.foldLeft((0, 0, 0, "")) {
            case ((pos, neg, maxPv, best), (flag, uv, pv, url)) =>
              val nextPos = if (flag == "pos") pos + uv else pos
              val nextNeg = if (flag == "neg") neg + uv else neg
              if (pv > maxPv) (nextPos, nextNeg, pv, url)
              else (nextPos, nextNeg, maxPv, best)
          }
        // NOTE(review): the third field is the GLOBAL uv total, identical for
        // every host — confirm this is intended rather than the per-host uv sum.
        ("ratio", s"$posUv|$negUv", total, chosenUrl)
      }

    // Print results to stdout.
    rddFiltered.collect().foreach { case (host, (flag, ratio, uv, url)) =>
      println(s"$host, ($flag, $ratio, $uv, $url)")
    }

    // Release Spark resources.
    sc.stop()
  }
}
