package com.shujia.core

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo26PageRank {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo26PageRank")
    conf.setMaster("local")

    val sc: SparkContext = new SparkContext(conf)

    val sourcePageRankRDD: RDD[String] = sc.textFile("spark/data/pageRank.txt")

    // Parse each line of the form "page->a,b,c" into (page, list of pages it links to).
    // Cached: this RDD is reused by count() below and by the join in every iteration,
    // so without cache() the file would be re-read and re-parsed each time.
    val standPageRDD: RDD[(String, List[String])] = sourcePageRankRDD.map(line => {
      val splits: Array[String] = line.split("->")
      val page: String = splits(0)
      val pageList: List[String] = splits(1).split(",").toList
      (page, pageList)
    }).cache()

    // Total number of pages (N in the PageRank formula).
    val pages: Long = standPageRDD.count()

    // Give every page an initial rank of 1.0: (page, outLinks, pr).
    var initPageRDD: RDD[(String, List[String], Double)] = standPageRDD.map(kv => (kv._1, kv._2, 1.0))

    var flag: Boolean = true
    var cnt: Int = 0
    // Converged when the mean absolute rank change drops below this threshold.
    val stopCondition: Double = 0.0000001
    // Damping factor of the PageRank formula.
    val q: Double = 0.85

    while (flag) {
      cnt += 1
      // PR(p) = (1-q)/N + q * sum(PR(i)/L(i)) over pages i linking to p.
      // Emit the raw contribution pr/L(i) per out-link, sum them per target page,
      // then apply the damping factor and teleport term ONCE per page.
      // (The original added (1-q)/N once per incoming edge inside flatMap, so a
      // page with k in-links received the teleport term k times after reduceByKey,
      // and it hard-coded 0.85 instead of using q.)
      val newPageRDD: RDD[(String, Double)] = initPageRDD
        .flatMap(t3 => {
          val pageList: List[String] = t3._2
          val pr: Double = t3._3
          pageList.map(page => (page, pr / pageList.size))
        })
        .reduceByKey(_ + _)
        .mapValues(contribSum => (1 - q) / pages + q * contribSum)

      // Stop condition: mean absolute difference between the previous and the
      // newly computed rank, averaged over all pages.
      val avgDiff: Double = initPageRDD
        .map(t3 => (t3._1, t3._3))
        .join(newPageRDD)
        .map(t2 => {
          val oldPr: Double = t2._2._1
          val newPr: Double = t2._2._2
          Math.abs(oldPr - newPr)
        }).sum() / pages

      println(s"第${cnt}计算出的平均误差为:$avgDiff")
      if (avgDiff < stopCondition) {
        flag = false
      }
      // Re-attach the out-link lists to build (page, outLinks, newPr) for the next round.
      // NOTE(review): this inner join silently drops pages that receive no in-links;
      // acceptable for this demo's data, but a real implementation would use a
      // left outer join from standPageRDD so such pages keep a rank.
      initPageRDD = newPageRDD.join(standPageRDD).map(kv => (kv._1, kv._2._2, kv._2._1))

    }

    initPageRDD.foreach(println)

  }

}
