package com.wtw.graph

import org.apache.spark.graphx.{Edge, Graph}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object JoinDemo {

  /** GraphX `outerJoinVertices` demo.
    *
    * Builds a small social graph, then joins an external (id, Boolean) RDD
    * onto its vertices to adjust each vertex's age attribute:
    *   - flag is Some(true)  => age + 1
    *   - flag is Some(false) => age - 1
    *   - vertex not in the external RDD => age becomes 0
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Vertex data: (vertexId, (name, age))
      val vertexArray = Array(
        (1L, ("Alice", 28)),
        (2L, ("Bob", 27)),
        (3L, ("Charlie", 65)),
        (4L, ("David", 42)),
        (5L, ("Ed", 55)),
        (6L, ("Fran", 50))
      )

      // Edge data: (srcId, dstId, weight)
      val edgeArray = Array(
        Edge(2L, 1L, 7),
        Edge(2L, 4L, 2),
        Edge(3L, 2L, 4),
        Edge(3L, 6L, 3),
        Edge(4L, 1L, 1),
        Edge(5L, 2L, 2),
        Edge(5L, 3L, 8),
        Edge(5L, 6L, 3)
      )

      // (1) Create the graph from the vertex and edge data above.
      val vertexs: RDD[(Long, (String, Int))] = sc.parallelize(vertexArray)
      val edges = sc.parallelize(edgeArray)
      val graph = Graph(vertexs, edges)

      // (2) Build a new vertex RDD of (id, Boolean) and join it with the
      // graph built in (1), changing the original age attribute: if the
      // external RDD's flag is true add 1 to age, if false subtract 1, and
      // if the vertex has no match in the external RDD set age to 0.
      // Note: vertex 6L is deliberately absent, so it hits the None branch.
      val rdd: RDD[(Long, Boolean)] = sc.parallelize(List((1L, false), (2L, true), (3L, false), (4L, true), (5L, false)))

      // `id`/`attr` are the graph vertex's id and (name, age) attribute;
      // `isAdd` is Some(flag) when the external RDD has a matching id,
      // None otherwise.
      val out_graph = graph.outerJoinVertices(rdd)((id, attr, isAdd) => {
        isAdd match {
          case Some(add) => (attr._1, if (add) attr._2 + 1 else attr._2 - 1)
          case None      => (attr._1, 0) // unmatched vertex: zero out the age
        }
      })

      out_graph.vertices.collect().foreach(println)
      println("-----------outerJoinVertices--------------")
    } finally {
      // Always release the SparkContext, even if the job above fails;
      // the original code leaked it.
      sc.stop()
    }
  }
}
