package com.shujia.spark

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Iterative PageRank over an edge list stored as a text file.
  *
  * Input format, one page per line: "page\tlink1,link2,...".
  * Iterates until the mean absolute change in rank drops below 0.01,
  * then prints the final (page, rank) pairs.
  */
object Demo16PageRank {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("Demo16PageRank")
      .setMaster("local")

    val sc = new SparkContext(conf)

    val data = sc.textFile("spark/data/pagerank.txt")

    // Total number of pages (one page per input line).
    val N = data.count()

    // Damping factor.
    val Q = 0.85

    // Initial PageRank value for every page.
    var prRDD = data.map(line => {
      val page = line.split("\t")(0)
      (page, 1.0)
    })

    // Outgoing-link list per page.
    // Cached: it is re-joined on every iteration of the loop below, and
    // without caching Spark would re-read and re-parse the file each time.
    val linkRDD = data.map(line => {
      val split = line.split("\t")
      val page = split(0)
      val link = split(1).split(",").toList
      (page, link)
    }).cache()

    var flag = true

    while (flag) {
      // Contribution each page distributes evenly to its outgoing links.
      val contribs = prRDD.join(linkRDD).flatMap(kv => {
        val pr = kv._2._1
        val link = kv._2._2
        val avgPr = pr / link.length.toDouble
        link.map(p => (p, avgPr))
      })

      // BUG FIX: a page that no other page links to never appears as a key
      // in `contribs`, so it used to drop out of the rank RDD after
      // reduceByKey + join, shrinking the page set each iteration.
      // Seeding every known page with a zero contribution keeps the page
      // set stable; such pages correctly converge to (1 - Q) / N.
      val newRDD = linkRDD
        .mapValues(_ => 0.0)
        .union(contribs)
        .reduceByKey((x, y) => x + y)
        .map(kv => {
          val page = kv._1
          val pr = kv._2
          // Apply the damping factor.
          val newPR = pr * Q + ((1 - Q) / N)
          (page, newPR)
        })
        .cache() // reused twice: convergence check below and next iteration

      /**
        * Convergence check: mean absolute difference between the previous
        * ranks and the current ranks across all pages.
        */
      val incRDD = newRDD.join(prRDD).map(kv => {
        // Previous iteration's rank.
        val prev = kv._2._2
        // Current iteration's rank.
        val curr = kv._2._1

        Math.abs(prev - curr)
      })

      println("迭代======================================")

      // Mean absolute rank change over all pages.
      val avg = incRDD.sum() / N

      if (avg < 0.01) {
        flag = false
      }

      // Carry the freshly computed ranks into the next iteration.
      prRDD = newRDD
    }

    prRDD.foreach(println)

    // BUG FIX: release executor/driver resources when the job is done.
    sc.stop()
  }
}
