package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo25PageRank {

  /**
   * Simplified iterative PageRank (no damping factor) over a small hard-coded
   * link graph. Each iteration every page splits its current PR value evenly
   * among its out-links; iteration stops once the mean absolute change of PR
   * values falls below the threshold `yz`.
   *
   * Fixes vs. the original:
   *  - `sc.stop()` is now called in a `finally` block (the context was leaked).
   *  - `pageRDD` and each iteration's `newPrRDD` are cached: both are consumed
   *    more than once (print + join / join every iteration), so without
   *    `cache()` their lineage was recomputed on every action.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName(this.getClass.getSimpleName.replace("$", ""))
    val sc = new SparkContext(conf)

    try {
      // Link graph encoded as "page->outLink1,outLink2,...".
      val pageList: List[String] = List[String]("A->B,D", "B->C", "C->A,B", "D->B,C")

      // Convergence threshold for the mean absolute PR change.
      val yz: Double = 0.000001

      val rdd: RDD[String] = sc.parallelize(pageList)

      // Parse "A->B,D" into ("A", List("B", "D")).
      // Cached because it is re-joined in every loop iteration below.
      val pageRDD: RDD[(String, List[String])] = rdd.map(line => {
        val splits: Array[String] = line.split("->")
        val page: String = splits(0)
        val outPageList: List[String] = splits(1).split(",").toList
        (page, outPageList)
      }).cache()

      // Initialise every page with PR = 1.0.
      var pagePrRDD: RDD[(String, List[String], Double)] =
        pageRDD.map { case (page, outs) => (page, outs, 1.0) }

      var condition: Boolean = true
      var cnt: Int = 1
      while (condition) {
        // Each page sends pr/outDegree to every out-link; contributions are
        // summed per receiving page. NOTE(review): a page with no inbound
        // links would disappear here (and from the inner joins below) — every
        // page in this hard-coded graph has at least one inbound link, so
        // behavior is unchanged, but generalizing the input would need a
        // re-seed of missing pages.
        val newPrRDD: RDD[(String, Double)] = pagePrRDD.flatMap { case (_, outs, pr) =>
          val share: Double = pr / outs.size
          outs.map(p => (p, share))
        }.reduceByKey(_ + _)
          .cache() // consumed twice per iteration: the print below and the join(s)

        println(s"经过了${cnt}次迭代，每个页面新的Pr值如下：")
        newPrRDD.foreach(println)

        // Mean absolute difference between the old and new PR values.
        val avgDiff: Double = pagePrRDD
          .map { case (page, _, pr) => (page, pr) }
          .join(newPrRDD)
          .map { case (_, (oldPr, newPr)) => Math.abs(oldPr - newPr) }
          .sum() / pageList.size

        println(s"当前的平均误差为:$avgDiff")

        if (avgDiff < yz) {
          // Converged: leave the loop.
          condition = false
        } else {
          // Re-attach the out-link lists to the new PR values for the next round.
          pagePrRDD = newPrRDD
            .join(pageRDD)
            .map { case (page, (pr, outs)) => (page, outs, pr) }
        }

        cnt += 1
      }
    } finally {
      // Always release the SparkContext, even if an iteration fails.
      sc.stop()
    }
  }

}
