package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Iterative PageRank over a link graph stored as text lines of the form
  * "page-out1,out2,...". Repeats the power-iteration step until the mean
  * absolute change in PR values falls below a convergence threshold, then
  * prints the final (page, outLinks, pr) triples.
  */
object Demo21PageRank {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("pagerank")

    val sc = new SparkContext(conf)

    // Damping factor: probability of following an out-link rather than
    // jumping to a random page.
    val dampingFactor: Double = 0.85
    // Stop once the mean absolute per-page PR change drops below this.
    val convergenceThreshold: Double = 0.0001

    /**
      * Read the raw graph data.
      * Each line: "<page>-<outLink1>,<outLink2>,..."
      */
    val data: RDD[String] = sc.textFile("Spark/data/pagerank.txt")

    /**
      * Parse every line into (page, outLinkList).
      * NOTE(review): assumes every line has a "-" and a non-empty link list;
      * a sink page (no out-links) would throw here — confirm input guarantees.
      */
    val pageLinkRDD: RDD[(String, List[String])] = data.map(line => {
      val split: Array[String] = line.split("-")
      // the current page
      val page: String = split(0)
      // its out-link list
      val linked: List[String] = split(1).split(",").toList
      (page, linked)
    })

    // The link structure is reused on every iteration — cache it once.
    pageLinkRDD.cache()

    // Total number of pages; normalizes the random-jump term.
    val N: Long = pageLinkRDD.count()

    /**
      * Seed every page with an initial PR of 1.0.
      */
    var pageRDD: RDD[(String, List[String], Double)] = pageLinkRDD.map {
      case (page, linked) =>
        (page, linked, 1.0)
    }
    // Cache the iteration state: it is consumed twice per loop
    // (contribution pass + convergence check).
    pageRDD.cache()

    var flag = true

    while (flag) {

      /**
        * Each page splits its PR evenly across its out-links, e.g.
        * (A, [B, D], 1.0) contributes (B, 0.5) and (D, 0.5).
        * (Fused map+flatten into a single flatMap.)
        */
      val avgPrFlatMapRDD: RDD[(String, Double)] = pageRDD.flatMap {
        case (page, linked, pr) =>
          // share of PR handed to each out-link
          val avgPr: Double = pr / linked.length
          linked.map(p => (p, avgPr))
      }

      /**
        * Sum the incoming contributions to get each page's raw new PR.
        */
      val newPrRDD: RDD[(String, Double)] = avgPrFlatMapRDD.reduceByKey(_ + _)

      /**
        * Re-attach the out-link lists and apply the damping formula.
        * BUG FIX: use a leftOuterJoin from the link table — the previous
        * inner join silently DROPPED pages with no inbound links, removing
        * them from all later iterations and from the final output. Such
        * pages now receive only the random-jump term (1 - q) / N.
        */
      val currPageRDD: RDD[(String, List[String], Double)] =
        pageLinkRDD.leftOuterJoin(newPrRDD).map {
          case (page, (link, prOpt)) =>
            (page, link, (1 - dampingFactor) / N + dampingFactor * prOpt.getOrElse(0.0))
        }
      // Cache: consumed by the convergence check below AND by the next
      // iteration. Without this, every action replays the whole lineage
      // back to textFile. (For very long runs, periodic checkpointing
      // would also be needed to truncate the lineage itself.)
      currPageRDD.cache()

      /**
        * Convergence check: mean absolute difference between this
        * iteration's PR values and the previous iteration's.
        */
      val currKV: RDD[(String, Double)] = currPageRDD.map {
        case (page, link, pr) => (page, pr)
      }

      val lastKV: RDD[(String, Double)] = pageRDD.map {
        case (page, link, pr) => (page, pr)
      }

      val chaAge: Double = currKV.join(lastKV).map {
        case (page, (currPr, lastPr)) =>
          Math.abs(currPr - lastPr)
      }.sum() / N

      println(s"差值平均值：$chaAge")
      if (chaAge < convergenceThreshold) {
        flag = false
      }

      // Release the previous iteration's cached blocks, then advance:
      // the next iteration reads this iteration's result.
      pageRDD.unpersist()
      pageRDD = currPageRDD
    }

    pageRDD.foreach(println)
  }

}
