package join

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD

object OuterJoinVerticesDemo {

  /**
    * Demonstrates GraphX `outerJoinVertices`: joins an external
    * `(VertexId, Boolean)` RDD onto a user graph and rewrites each
    * vertex's age attribute according to the joined flag
    * (true => age + 1, false => age - 1, no match => age reset to 0).
    */
  def main(args: Array[String]): Unit = {
    // Configure a local single-threaded Spark runtime for the demo.
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Vertices: (id, (name, age)).
      val users: RDD[(VertexId, (String, Int))] =
        sc.parallelize(Array(
          (1L, ("Alice", 28)),
          (2L, ("Bob", 27)),
          (3L, ("Charlie", 65)),
          (4L, ("David", 42)),
          (5L, ("Ed", 55)),
          (6L, ("Fran", 50))))

      // Edges: srcId -> dstId with an Int weight attribute.
      val relationships: RDD[Edge[Int]] =
        sc.parallelize(Array(
          Edge(2L, 1L, 7),
          Edge(2L, 4L, 2),
          Edge(3L, 2L, 4),
          Edge(3L, 6L, 3),
          Edge(4L, 1L, 1),
          Edge(5L, 2L, 2),
          Edge(5L, 3L, 8),
          Edge(5L, 6L, 3)))

      // Build the initial graph from the vertex and edge RDDs.
      val graph = Graph(users, relationships)

      println("******************************************")
      graph.vertices.collect.foreach(println)
      println("******************************************")

      // External dataset to join against the graph: (vertexId, flag).
      // Vertex 5 is intentionally absent so the `None` branch is exercised.
      val rdd: RDD[(VertexId, Boolean)] =
        sc.makeRDD(Array((4L, true), (1L, false), (6L, true), (3L, false), (2L, true)))
      rdd.collect.foreach(println)

      // `outerJoinVertices` takes two parameter lists: the first is the
      // external RDD to join; the second is a map function applied to every
      // vertex, receiving `Some(flag)` when the vertex id matched the
      // external RDD and `None` otherwise.
      println("*****************************************************************************")
      println("自己构一份新的RDD顶点数据集（id,boolean) ，和1创建的图进行关联，改变原有顶点的age属性,")
      println("如果外部rdd的属性为true，把age+1，如果是false减1，如果没关联上则为0")
      val graph2: Graph[(String, Int), Int] =
        graph.outerJoinVertices(rdd) { (_, attr, flag) =>
          flag match {
            case Some(true)  => (attr._1 + ": age+1", attr._2 + 1) // matched, flag true
            case Some(false) => (attr._1 + ": age-1", attr._2 - 1) // matched, flag false
            case None        => (attr._1, 0)                       // no match: age reset to 0
          }
        }

      graph2.vertices.collect.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
