package GraghX.join

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
import spire.std.map

/**
 * Demo of `Graph.joinVertices`: joins an external VertexRDD (here the graph's
 * own in-degrees) onto the vertex attributes of a small property graph and
 * prints the vertices before and after the join.
 */
object joinVerticesDemo {
  def main(args: Array[String]): Unit = {
    // Set up the local Spark runtime.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Vertices: (vertexId, (name, score)).
    val users: RDD[(VertexId, (String, Int))] =
      sc.parallelize(Array((3L, ("rxin", 1000)), (7L, ("jgonzal", 1000)), (5L, ("franklin", 1000)), (2L, ("istoica", 1000))))

    // Edges: (srcId, dstId, relationship label).
    val relationships: RDD[Edge[String]] =
      sc.parallelize(Array(Edge(3L, 7L, "collab"), Edge(5L, 3L, "advisor"), Edge(2L, 5L, "colleague"), Edge(5L, 7L, "pi")))

    // Build the initial graph.
    val graph = Graph(users, relationships)

    // In-degree of every vertex, as a VertexRDD[Int].
    // (val, not var: never reassigned.)
    val rdd: VertexRDD[Int] = graph.inDegrees
    rdd.collect.foreach(println)
    println("******************************************")
    graph.vertices.collect.foreach(println)

    // joinVertices takes two parameter lists:
    //   1) the external RDD to join against the graph's vertices;
    //   2) a map function applied to each vertex that has a match,
    //      receiving (vertexId, oldAttr, joinedValue).
    // Vertices without a match keep their original attribute.
    // Here the score is scaled by (inDegree + 1).
    val graph2: Graph[(String, Int), String] =
      graph.joinVertices(rdd)((id, attr, inNum) => (attr._1, attr._2 * (inNum + 1)))
    println("******************************************")
    graph2.vertices.collect.foreach(println)

    // Release the SparkContext's resources (the original leaked it).
    sc.stop()
  }

}