package zhangwei.test

import org.apache.spark._
import org.apache.spark.SparkContext._

object PageRank {

  /** Entry point: runs 10 iterations of PageRank over a neighbor-list RDD
    * previously saved with `saveAsObjectFile("links")`, then writes the final
    * ranks as text to `"ranks"`.
    *
    * @param args optional; args(0) is the Spark master URL (defaults to "local")
    */
  def main(args: Array[String]): Unit = {
    // Use the first CLI argument as the master URL; fall back to local mode.
    val master = if (args.nonEmpty) args(0) else "local"
    // Fix: the app name was "WordCount" (copy-paste leftover); name it after this job.
    val sc = new SparkContext(master, "PageRank", System.getenv("SPARK_HOME"))

    try {
      // Assume that our neighbor list was saved as a Spark objectFile.
      // Hash-partition once and persist so every join below reuses the same
      // partitioning instead of reshuffling each iteration.
      val links = sc.objectFile[(String, Seq[String])]("links")
        .partitionBy(new HashPartitioner(100))
        .persist()

      // Initialize each page's rank to 1.0; since we use mapValues, the
      // resulting RDD keeps the same partitioner as `links`.
      var ranks = links.mapValues(_ => 1.0)

      // Run 10 iterations of the PageRank update (damping factor 0.85).
      for (_ <- 0 until 10) {
        // Each page distributes rank / outDegree to all of its neighbors.
        // (Pattern variable renamed from `links` to `neighbors` so it no
        // longer shadows the outer RDD.)
        val contributions = links.join(ranks).flatMap {
          case (_, (neighbors, rank)) =>
            neighbors.map(dest => (dest, rank / neighbors.size))
        }

        // Sum incoming contributions per page and apply the damping formula.
        ranks = contributions
          .reduceByKey(_ + _)
          .mapValues(v => 0.15 + 0.85 * v)
      }

      // Write out the final ranks.
      ranks.saveAsTextFile("ranks")
    } finally {
      // Fix: the original never stopped the context — release cluster
      // resources even if an iteration or the save fails.
      sc.stop()
    }
  }

}