package join

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.{Edge, Graph, VertexId, VertexRDD}
import org.apache.spark.rdd.RDD
/**
  * Created by danke on 2020/4/9.
  */
object OuterJoinVerticesDemo {
  /**
    * Demo of `Graph.outerJoinVertices`: joins a side RDD of boolean flags onto
    * the graph's vertices and adjusts each user's age based on the flag.
    * Prints in-degrees and vertex attributes before and after the join.
    */
  def main(args: Array[String]): Unit = {
    // Set up the Spark execution environment (local mode for this demo).
    val conf = new SparkConf().setAppName("SimpleGraphX").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Vertices: (vertexId, (name, age)).
    val users: RDD[(VertexId, (String, Int))] =
      sc.parallelize(Array(
        (1L, ("Alice", 28)),
        (2L, ("Bob", 27)),
        (3L, ("Charlie", 65)),
        (4L, ("David", 42)),
        (5L, ("Ed", 55)),
        (6L, ("Fran", 50))
      ))

    // Edges: Edge(srcId, dstId, weight).
    val relationships: RDD[Edge[Int]] =
      sc.parallelize(Array(
        Edge(2L, 1L, 7),
        Edge(2L, 4L, 2),
        Edge(3L, 2L, 4),
        Edge(3L, 6L, 3),
        Edge(4L, 1L, 1),
        Edge(5L, 2L, 2),
        Edge(5L, 3L, 8),
        Edge(5L, 6L, 3)
      ))

    // Build the initial graph.
    val graph = Graph(users, relationships)

    // Side RDD to outer-join onto the vertices: vertexId -> boolean flag.
    // (Not derived from in-degrees — it is a hand-built flag table; any vertex
    // missing here arrives in the join function as None.)
    val rdd: RDD[(Long, Boolean)] =
      sc.parallelize(Array(
        (1L, true),
        (2L, true),
        (3L, true),
        (4L, true),
        (5L, true),
        (6L, false)
      ))

    // Show each vertex's in-degree (vertices with in-degree 0 are absent).
    val inDegrees: VertexRDD[Int] = graph.inDegrees
    inDegrees.collect.foreach(println)
    println("******************************************")
    graph.vertices.collect.foreach(println)

    // outerJoinVertices: Some(true) bumps age by 1, Some(false) drops it by 1.
    // BUG FIX: the original match was non-exhaustive — a vertex absent from
    // `rdd` yields None and would throw a MatchError at runtime. Keep the
    // attribute unchanged in that case.
    val graph2: Graph[(String, Int), Int] =
      graph.outerJoinVertices(rdd) { (id, attr, flagOpt) =>
        flagOpt match {
          case Some(true)  => (attr._1, attr._2 + 1)
          case Some(false) => (attr._1, attr._2 - 1)
          case None        => attr
        }
      }

    println("******************************************")
    graph2.vertices.collect.foreach(println)

    // Release Spark resources.
    sc.stop()
  }
}
