package com.llx.dataset

import org.apache.flink.api.java.aggregation.Aggregations
import org.apache.flink.api.scala._
import org.apache.flink.util.Collector
/**
  * Flink batch (DataSet API) implementation of the PageRank algorithm,
  * using a bulk iteration with a convergence-based termination criterion.
  *
  * @author Llx
  * @version v1.0.0
  * @since 2018/12/3
  */
object PageRankScala {

  def main(args: Array[String]): Unit = {
    val numPages = 10 // total number of pages in the graph
    // Probability mass redistributed through links. NOTE(review): canonical
    // PageRank uses 0.85; kept at 0.1 to preserve the original behavior —
    // confirm whether this value is intentional.
    val DAMPENING_FACTOR = 0.1
    // Convergence threshold: iteration stops once no rank moves by more than this.
    val EPSILON = 0.1

    // User-defined types
    case class Link(sourceId: Long, targetId: Long)
    case class Page(pageId: Long, rank: Double)
    case class AdjacencyList(sourceId: Long, targetIds: Array[Long])

    // Set up the batch execution environment.
    val env = ExecutionEnvironment.getExecutionEnvironment

    // Read pages (pageId, rank) from a CSV file; the path is a placeholder.
    val pages = env.readCsvFile[Page]("")

    // Read links as (sourceId, targetId) pairs; the path is a placeholder.
    val links = env.readCsvFile[Link]("")

    // Assign every page the uniform initial rank 1/numPages.
    // BUG FIX: the original called Long.unbox(p) on a Page value, which does
    // not type-check — the id must be taken from p.pageId.
    val pagesWithRanks = pages.map(p => Page(p.pageId, 1.0 / numPages))

    // Build the adjacency list: one (sourceId, Array(targetIds)) per source page.
    val adjacencyLists = links
      // Wrap each link's target in a singleton array so reduce can concatenate.
      .map(e => AdjacencyList(e.sourceId, Array(e.targetId)))
      // groupBy + reduce acts like reduceByKey: merge the target arrays of all
      // links that share the same source page.
      .groupBy("sourceId").reduce {
      (l1, l2) => AdjacencyList(l1.sourceId, l1.targetIds ++ l2.targetIds)
    }

    // Bulk iteration, at most 10 rounds, with a custom termination criterion.
    val finalRanks = pagesWithRanks.iterateWithTermination(10) {
      currentRanks =>
        val newRanks = currentRanks
          // Distribute each page's rank evenly across its outgoing links.
          .join(adjacencyLists).where("pageId").equalTo("sourceId") {
          (page, adjacent, out: Collector[Page]) =>
            for (targetId <- adjacent.targetIds) {
              out.collect(Page(targetId, page.rank / adjacent.targetIds.length))
            }
        }
          // Sum the partial ranks flowing into each target page.
          .groupBy("pageId").aggregate(Aggregations.SUM, "rank")
          // Apply the dampening factor plus the uniform random-jump contribution.
          .map { p =>
          Page(p.pageId, (p.rank * DAMPENING_FACTOR) + ((1 - DAMPENING_FACTOR) / numPages))
        }

        // Termination criterion: emit a record for every page whose rank moved
        // by more than EPSILON. An empty data set means convergence, which
        // stops the iteration before the round limit is reached.
        val termination = currentRanks.join(newRanks).where("pageId").equalTo("pageId") {
          (current, next, out: Collector[Int]) =>
            if (math.abs(current.rank - next.rank) > EPSILON) out.collect(1)
        }
        (newRanks, termination)
    }

    val result = finalRanks

    // Emit the result as CSV; the output path is a placeholder.
    result.writeAsCsv("", "\n", " ")

    // BUG FIX: writeAsCsv is a lazy sink — without env.execute() the job is
    // never submitted and nothing runs. (Only print() triggers execution itself.)
    env.execute("PageRank Example")
  }
}
