package aggregate

import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

object Demo2_Method2 {

  /**
    * Sends each destination vertex its source vertex's current value plus one —
    * the candidate path length reaching the destination through this edge.
    */
  def sendMsg(ec: EdgeContext[Int, String, Int]): Unit = {
    ec.sendToDst(ec.srcAttr + 1)
  }

  /** Merges two incoming messages by keeping the larger (longest path seen so far). */
  def mergeMsg(a: Int, b: Int): Int = {
    math.max(a, b)
  }

  /**
    * Iteratively propagates distances through the graph until the vertex values converge.
    *
    * Each round aggregates `srcAttr + 1` messages into every destination vertex
    * (keeping the maximum), then compares the sum of all vertex values against the
    * sum from the previous round (`flag`). When the sum no longer grows, the values
    * have stopped changing and the previous graph is returned.
    *
    * NOTE(review): on a graph containing a cycle the values would grow forever —
    * this assumes a DAG, as in `main`. Confirm before reusing elsewhere.
    *
    * @param g    graph whose Int vertex attribute is the distance accumulated so far
    * @param flag sum of all vertex values from the previous round (0 on the first call)
    * @return the converged graph
    */
  @scala.annotation.tailrec
  def sumEdgeCount(g: Graph[Int, String], flag: Int): Graph[Int, String] = {
    val verts: VertexRDD[PartitionID] = g.aggregateMessages[Int](sendMsg, mergeMsg)

    // Vertices that received no message are re-added by Graph() with the default
    // attribute (0 for Int), so source vertices stay at 0 every round.
    val g2 = Graph(verts, g.edges)

    // The vertex attributes are already Int, so sum them directly
    // (no need to round-trip through toString/toInt).
    val sum: Int = g2.vertices.map(_._2).reduce(_ + _)

    if (sum > flag) {
      // Values still growing: recurse with the new sum as the convergence marker.
      sumEdgeCount(g2, sum)
    } else {
      // Sum unchanged since the previous round — converged; return the prior graph.
      g
    }
  }

  def main(args: Array[String]): Unit = {

    // Set up the local Spark runtime.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    import spark.implicits._ // required for RDD.toDF below
    sc.setLogLevel("WARN")

    // Build the demo graph: five people connected by friendship / reporting edges.
    val myVertices = sc.parallelize(Array(
      (1L, "张三"),
      (2L, "李四"),
      (3L, "王五"),
      (4L, "钱六"),
      (5L, "领导")))
    val myEdges = sc.makeRDD(Array(
      Edge(1L, 2L, "朋友"),
      Edge(2L, 3L, "朋友"),
      Edge(3L, 4L, "朋友"),
      Edge(4L, 5L, "上下级"),
      Edge(3L, 5L, "上下级")
    ))

    val myGraph = Graph(myVertices, myEdges)

    // Replace the name attribute with an initial distance of 0 on every vertex.
    val initGraph: Graph[Int, String] = myGraph.mapVertices((_, _) => 0)
    println("--------------------结果如下：-------------------------")
    val verticesRDD: RDD[(VertexId, PartitionID)] = sumEdgeCount(initGraph, 0).vertices
    verticesRDD.sortByKey().toDF("vertex","max_distance").show()

    spark.stop()
  }

}
