package com.cloud.core.graphx.demo

import org.apache.spark.graphx.util.GraphGenerators
import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Graph_learning_03_neighbour_agg {

  // Created lazily by init(); remains null until then.
  var sc: SparkContext = null
  val master = "local"
  // Fixed: was "Graph_learning_02", inconsistent with this object's name.
  val appName = "Graph_learning_03"

  /** Initialize the SparkContext from `master` and `appName`. */
  def init(): Unit = {

    val sparkConf = new SparkConf()
      .setMaster(master)
      .setAppName(appName)

    sc = new SparkContext(sparkConf)
  }

  /**
   * Neighborhood-aggregation demo using `aggregateMessages`:
   * for each vertex, compute the average attribute ("age") of its
   * in-neighbors whose attribute is greater than its own.
   */
  def main(args: Array[String]): Unit = {

    init()

    try {
      // Random log-normal graph; each vertex's attribute is its own id as a Double.
      val graph: Graph[Double, Int] =
        GraphGenerators.logNormalGraph(sc, numVertices = 100)
          .mapVertices((id, _) => id.toDouble)

      // Per vertex: (number of older followers, sum of their ages).
      val olderFollowers: VertexRDD[(Int, Double)]
      = graph.aggregateMessages[(Int, Double)](
        triplet => { // Map Function
          if (triplet.srcAttr > triplet.dstAttr) {
            // Send message to destination vertex containing counter and age
            triplet.sendToDst((1, triplet.srcAttr))
          }
        },
        // Add counter and age
        (a, b) => (a._1 + b._1, a._2 + b._2) // Reduce Function
      )

      // Divide total age by number of older followers
      // to get average age of older followers
      val avgAgeOfOlderFollowers: VertexRDD[Double] =
        olderFollowers.mapValues((id, value) =>
          value match {
            case (count, totalAge) => totalAge / count
          })

      // Display the results
      println(avgAgeOfOlderFollowers.count())
      avgAgeOfOlderFollowers.collect.foreach(println(_))
    } finally {
      // Fixed: the SparkContext was never stopped, leaking the local cluster's
      // resources; stop it even if the job throws.
      sc.stop()
    }
  }
}
